var/home/core/zuul-output/logs/kubelet.log:
Dec 05 19:13:40 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 19:13:40 crc restorecon[4687]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 19:13:40 crc restorecon[4687]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc 
restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 19:13:40 crc 
restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc 
restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc 
restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 
crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 19:13:40 crc 
restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:40 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc 
restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 19:13:41 crc restorecon[4687]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 19:13:41 crc kubenswrapper[4982]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.209591 4982 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212171 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212188 4982 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212193 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212199 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212204 4982 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212210 4982 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212215 4982 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212219 4982 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212224 4982 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212229 4982 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212233 4982 feature_gate.go:330] unrecognized feature gate: Example Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212238 4982 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212243 4982 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212248 4982 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212252 4982 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212256 4982 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212260 4982 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212264 4982 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212267 4982 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212271 4982 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212275 4982 
feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212285 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212288 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212292 4982 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212296 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212299 4982 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212314 4982 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212318 4982 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212323 4982 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212327 4982 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212331 4982 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212334 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212339 4982 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212343 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212348 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212352 4982 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212355 4982 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212360 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212364 4982 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212368 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212372 4982 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212375 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212379 4982 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212384 4982 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212387 4982 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212392 4982 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212397 4982 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212401 4982 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212405 4982 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212409 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212413 4982 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212417 4982 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212421 4982 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212424 4982 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212427 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212431 4982 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212437 4982 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
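The "unrecognized feature gate" warnings above are the upstream Kubernetes gate registry rejecting OpenShift-specific gate names it was never taught, while the "Setting GA/deprecated feature gate" lines come from setting gates that are registered but already graduated or on their way out. A sketch of that registry behavior using k8s.io/component-base/featuregate follows; upstream Set returns an error for unknown names, which this kubenswrapper build evidently downgrades to the warnings seen here.

// Sketch of the upstream feature-gate registry that produces messages like
// the ones above. Only registered gates are settable; unknown names fail.
package main

import (
	"fmt"

	"k8s.io/component-base/featuregate"
)

func main() {
	gates := featuregate.NewFeatureGate()

	// Register one known gate, the way Kubernetes components do at init.
	if err := gates.Add(map[featuregate.Feature]featuregate.FeatureSpec{
		"KMSv1": {Default: false, PreRelease: featuregate.Deprecated},
	}); err != nil {
		panic(err)
	}

	// Setting a registered gate works; a Deprecated one is logged with a
	// "Setting deprecated feature gate" warning, as seen in this log.
	if err := gates.Set("KMSv1=true"); err != nil {
		panic(err)
	}
	fmt.Println("KMSv1 enabled:", gates.Enabled("KMSv1"))

	// Setting an OpenShift-only name fails here: it was never registered.
	err := gates.Set("GatewayAPI=true")
	fmt.Println("unregistered gate:", err) // "unrecognized feature gate: GatewayAPI"
}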
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212442 4982 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212446 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212449 4982 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212453 4982 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212457 4982 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212460 4982 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212464 4982 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212467 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212470 4982 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212474 4982 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212477 4982 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212481 4982 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212485 4982 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.212488 4982 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212709 4982 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212719 4982 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212726 4982 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212732 4982 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212740 4982 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212745 4982 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212751 4982 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212758 4982 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212762 4982 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212767 4982 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212771 4982 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212776 4982 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212780 4982 flags.go:64] FLAG: 
--cgroup-driver="cgroupfs" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212784 4982 flags.go:64] FLAG: --cgroup-root="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212788 4982 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212792 4982 flags.go:64] FLAG: --client-ca-file="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212796 4982 flags.go:64] FLAG: --cloud-config="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212800 4982 flags.go:64] FLAG: --cloud-provider="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212804 4982 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212810 4982 flags.go:64] FLAG: --cluster-domain="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212815 4982 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212819 4982 flags.go:64] FLAG: --config-dir="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212824 4982 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212829 4982 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212835 4982 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212840 4982 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212844 4982 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212848 4982 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212853 4982 flags.go:64] FLAG: --contention-profiling="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212857 4982 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212861 4982 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212866 4982 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212870 4982 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212876 4982 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212882 4982 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212886 4982 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212891 4982 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212897 4982 flags.go:64] FLAG: --enable-server="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212902 4982 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212908 4982 flags.go:64] FLAG: --event-burst="100" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212913 4982 flags.go:64] FLAG: --event-qps="50" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212918 4982 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212922 4982 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 
19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212927 4982 flags.go:64] FLAG: --eviction-hard="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212932 4982 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212936 4982 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212941 4982 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212946 4982 flags.go:64] FLAG: --eviction-soft="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212950 4982 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212955 4982 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212959 4982 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212963 4982 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212968 4982 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212972 4982 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212976 4982 flags.go:64] FLAG: --feature-gates="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212982 4982 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212986 4982 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212991 4982 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.212996 4982 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213000 4982 flags.go:64] FLAG: --healthz-port="10248" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213004 4982 flags.go:64] FLAG: --help="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213008 4982 flags.go:64] FLAG: --hostname-override="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213012 4982 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213017 4982 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213021 4982 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213025 4982 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213028 4982 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213033 4982 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213037 4982 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213041 4982 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213046 4982 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213051 4982 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213055 4982 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 19:13:41 crc 
kubenswrapper[4982]: I1205 19:13:41.213060 4982 flags.go:64] FLAG: --kube-reserved="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213065 4982 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213069 4982 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213073 4982 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213078 4982 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213082 4982 flags.go:64] FLAG: --lock-file="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213087 4982 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213091 4982 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213096 4982 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213102 4982 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213106 4982 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213110 4982 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213114 4982 flags.go:64] FLAG: --logging-format="text" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213118 4982 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213123 4982 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213127 4982 flags.go:64] FLAG: --manifest-url="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213131 4982 flags.go:64] FLAG: --manifest-url-header="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213136 4982 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213141 4982 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213162 4982 flags.go:64] FLAG: --max-pods="110" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213168 4982 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213174 4982 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213179 4982 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213183 4982 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213187 4982 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213191 4982 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213195 4982 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213206 4982 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213210 4982 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213214 4982 
flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213218 4982 flags.go:64] FLAG: --pod-cidr="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213223 4982 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213229 4982 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213233 4982 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213237 4982 flags.go:64] FLAG: --pods-per-core="0" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213241 4982 flags.go:64] FLAG: --port="10250" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213246 4982 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213250 4982 flags.go:64] FLAG: --provider-id="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213255 4982 flags.go:64] FLAG: --qos-reserved="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213259 4982 flags.go:64] FLAG: --read-only-port="10255" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213263 4982 flags.go:64] FLAG: --register-node="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213267 4982 flags.go:64] FLAG: --register-schedulable="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213271 4982 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213279 4982 flags.go:64] FLAG: --registry-burst="10" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213284 4982 flags.go:64] FLAG: --registry-qps="5" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213289 4982 flags.go:64] FLAG: --reserved-cpus="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213293 4982 flags.go:64] FLAG: --reserved-memory="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213299 4982 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213303 4982 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213308 4982 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213312 4982 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213316 4982 flags.go:64] FLAG: --runonce="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213320 4982 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213324 4982 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213329 4982 flags.go:64] FLAG: --seccomp-default="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213333 4982 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213337 4982 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213341 4982 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213345 4982 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 
19:13:41.213350 4982 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213354 4982 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213358 4982 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213362 4982 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213366 4982 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213370 4982 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213374 4982 flags.go:64] FLAG: --system-cgroups="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213378 4982 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213384 4982 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213388 4982 flags.go:64] FLAG: --tls-cert-file="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213392 4982 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213397 4982 flags.go:64] FLAG: --tls-min-version="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213401 4982 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213406 4982 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213410 4982 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213414 4982 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213418 4982 flags.go:64] FLAG: --v="2" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213424 4982 flags.go:64] FLAG: --version="false" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213437 4982 flags.go:64] FLAG: --vmodule="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213442 4982 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213447 4982 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213552 4982 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213557 4982 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213562 4982 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
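The long FLAG: dump above is emitted once at startup at verbosity 2: after flag parsing, the kubelet walks every registered flag and logs its effective value. A stdlib sketch of the same pattern is below; the real binary uses github.com/spf13/pflag, but the VisitAll idiom is the same.

// Sketch of the "FLAG: --name=value" dump seen above: after parsing,
// walk every registered flag and print its effective value.
package main

import (
	"flag"
	"fmt"
)

func main() {
	v := flag.Int("v", 2, "log verbosity")
	nodeIP := flag.String("node-ip", "", "node IP address")
	flag.Parse()

	// Mirrors the flags.go:64 lines: one entry per registered flag.
	flag.VisitAll(func(f *flag.Flag) {
		fmt.Printf("FLAG: --%s=%q\n", f.Name, f.Value.String())
	})
	_ = v
	_ = nodeIP
}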
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213567 4982 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213571 4982 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213574 4982 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213578 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213582 4982 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213585 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213589 4982 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213593 4982 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213596 4982 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213600 4982 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213604 4982 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213608 4982 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213611 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213615 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213618 4982 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213622 4982 feature_gate.go:330] unrecognized feature gate: Example Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213625 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213629 4982 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213633 4982 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213636 4982 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213640 4982 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213644 4982 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213649 4982 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213653 4982 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213657 4982 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213661 4982 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213669 4982 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213673 4982 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213677 4982 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213682 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213686 4982 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213690 4982 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213701 4982 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213704 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213708 4982 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213712 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213715 4982 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213719 4982 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213723 4982 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213726 4982 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213730 4982 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213733 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213737 4982 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213740 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213744 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213747 4982 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213751 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213755 4982 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213758 4982 feature_gate.go:330] 
unrecognized feature gate: VolumeGroupSnapshot Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213762 4982 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213765 4982 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213769 4982 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213773 4982 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213776 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213780 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213783 4982 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213787 4982 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213790 4982 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213795 4982 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213799 4982 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213802 4982 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213806 4982 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213810 4982 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213815 4982 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213820 4982 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213824 4982 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213828 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.213831 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.213838 4982 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.227057 4982 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.227557 4982 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227751 4982 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227771 4982 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227781 4982 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227791 4982 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227801 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227809 4982 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227817 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227828 4982 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
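The "Golang settings" line above simply echoes the GOGC, GOMAXPROCS, and GOTRACEBACK environment variables; empty strings mean they are unset, so Go runtime defaults apply. A small sketch of that report, plus the effective values after defaults:

// Sketch of the server.go "Golang settings" report: raw environment
// values (empty here means unset) and the defaults that actually apply.
package main

import (
	"fmt"
	"os"
	"runtime"
)

func main() {
	fmt.Printf("GOGC=%q GOMAXPROCS=%q GOTRACEBACK=%q\n",
		os.Getenv("GOGC"), os.Getenv("GOMAXPROCS"), os.Getenv("GOTRACEBACK"))
	// Effective parallelism once runtime defaults are applied:
	fmt.Println("effective GOMAXPROCS:", runtime.GOMAXPROCS(0))
	fmt.Println("go version:", runtime.Version())
}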
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227842 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227851 4982 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227860 4982 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227868 4982 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227876 4982 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227884 4982 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227892 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227900 4982 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227908 4982 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227917 4982 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227926 4982 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227934 4982 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227942 4982 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227949 4982 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227957 4982 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227965 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227973 4982 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227982 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.227991 4982 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228001 4982 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228011 4982 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228021 4982 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228033 4982 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228043 4982 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228052 4982 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228060 4982 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228068 4982 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228076 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228084 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228092 4982 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228103 4982 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228113 4982 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228123 4982 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228132 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228141 4982 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228182 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228190 4982 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228199 4982 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228208 4982 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228216 4982 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228224 4982 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228233 4982 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228242 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228250 4982 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228259 4982 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228269 4982 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228277 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 
19:13:41.228285 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228294 4982 feature_gate.go:330] unrecognized feature gate: Example Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228343 4982 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228351 4982 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228360 4982 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228368 4982 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228376 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228384 4982 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228394 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228402 4982 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228410 4982 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228418 4982 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228426 4982 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228434 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228443 4982 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228450 4982 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.228464 4982 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228708 4982 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228723 4982 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228731 4982 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228740 4982 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228749 4982 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228757 4982 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228765 
4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228773 4982 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228781 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228789 4982 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228797 4982 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228805 4982 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228813 4982 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228822 4982 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228832 4982 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228845 4982 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228856 4982 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228866 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228874 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228883 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228891 4982 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228899 4982 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228908 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228916 4982 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228924 4982 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228933 4982 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228940 4982 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228948 4982 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228958 4982 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228969 4982 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228978 4982 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228987 4982 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.228995 4982 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229004 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229012 4982 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229019 4982 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229028 4982 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229041 4982 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229051 4982 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229059 4982 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229068 4982 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229078 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229086 4982 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229094 4982 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229103 4982 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229111 4982 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229120 4982 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229129 4982 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229137 4982 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229145 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229177 4982 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229185 4982 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229193 4982 feature_gate.go:330] unrecognized feature gate: Example Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229201 4982 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 
19:13:41.229209 4982 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229216 4982 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229225 4982 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229233 4982 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229241 4982 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229248 4982 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229256 4982 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229264 4982 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229272 4982 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229280 4982 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229288 4982 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229295 4982 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229303 4982 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229311 4982 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229319 4982 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229327 4982 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.229334 4982 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.229347 4982 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.229970 4982 server.go:940] "Client rotation is on, will bootstrap in background" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.234369 4982 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.234525 4982 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
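The next lines show the client certificate manager loading kubelet-client-current.pem and scheduling rotation well before expiry. A self-contained sketch of that deadline arithmetic follows; the 70-90% jitter window is an assumption about the upstream certificate manager, not something this log states.

// Sketch of the rotation-deadline arithmetic visible in the next lines:
// load the current client cert and pick a rotation time at a jittered
// point inside its validity window (assumed 70-90% of the lifetime).
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"math/rand"
	"os"
	"time"
)

func main() {
	raw, err := os.ReadFile("/var/lib/kubelet/pki/kubelet-client-current.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(raw) // first PEM block is the certificate
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}

	lifetime := cert.NotAfter.Sub(cert.NotBefore)
	jitter := 0.7 + 0.2*rand.Float64() // assumed 70-90% window
	deadline := cert.NotBefore.Add(time.Duration(float64(lifetime) * jitter))

	fmt.Println("expiration:       ", cert.NotAfter)
	fmt.Println("rotation deadline:", deadline)
}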
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.235327 4982 server.go:997] "Starting client certificate rotation"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.235367 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.235565 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-24 22:08:33.947569038 +0000 UTC
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.235660 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.241876 4982 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.243424 4982 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.244051 4982 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.259141 4982 log.go:25] "Validated CRI v1 runtime API"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.277627 4982 log.go:25] "Validated CRI v1 image API"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.279611 4982 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.282694 4982 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-19-09-33-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.282742 4982 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.314064 4982 manager.go:217] Machine: {Timestamp:2025-12-05 19:13:41.311213009 +0000 UTC m=+0.193099094 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654124544 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:3fb999fe-b94a-4144-86b7-b9a7445c3e37 BootID:c738741a-5d3a-4a2b-9f49-09675e56a75b Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827060224 Type:vfs Inodes:4108169 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108169 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:49:60:ac Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:49:60:ac Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:5c:0c:b8 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:da:ea:ff Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:dc:c5:9b Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:9e:1b:a4 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:9a:e9:ad:2c:38:3d Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ee:34:db:49:f9:20 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654124544 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.314614 4982 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.314852 4982 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.315845 4982 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.316352 4982 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.316443 4982 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.316938 4982 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.317007 4982 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.317430 4982 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.317521 4982 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.317859 4982 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.318641 4982 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.319911 4982 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.319967 4982 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.320028 4982 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.320059 4982 kubelet.go:324] "Adding apiserver pod source"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.320086 4982 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.322031 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.322119 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError"
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.322297 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.322474 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.323098 4982 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.323751 4982 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.324991 4982 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325867 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325912 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325928 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325941 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325963 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325977 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.325991 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326013 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326034 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326052 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326078 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326094 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.326878 4982 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.327704 4982 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.327739 4982 server.go:1280] "Started kubelet"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.328006 4982 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.328740 4982 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.329540 4982 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.329406 4982 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.94:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e679b734a7b98 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 19:13:41.327666072 +0000 UTC m=+0.209552097,LastTimestamp:2025-12-05 19:13:41.327666072 +0000 UTC m=+0.209552097,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 19:13:41 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331539 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331606 4982 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331917 4982 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.331926 4982 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331937 4982 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331943 4982 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.332184 4982 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.332272 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.332322 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.331900 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 13:37:16.592893921 +0000 UTC
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.335899 4982 factory.go:55] Registering systemd factory
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.335926 4982 factory.go:221] Registration of the systemd container factory successfully
Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.334839 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="200ms"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.337335 4982 factory.go:153] Registering CRI-O factory
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.337358 4982 factory.go:221] Registration of the crio container factory successfully
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.337488 4982 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.337536 4982 factory.go:103] Registering Raw factory
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.337571 4982 manager.go:1196] Started watching for new ooms in manager
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.338818 4982 manager.go:319] Starting recovery of all containers
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343654 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343766 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343783 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343798 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343812 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343825 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343837 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343849 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343864 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343878 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343892 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343903 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343921 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343938 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343951 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343972 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.343989 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344005 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344018 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344030 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344041 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344054 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344066 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344078 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344091 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344104 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344119 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344135 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344162 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344175 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344190 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344226 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344260 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344284 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344297 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344334 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344350 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344362 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344379 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344394 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344409 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344421 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344434 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344448 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344461 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344474 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344488 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344500 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344516 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344534 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344547 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344562 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344582 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344595 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344611 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344625 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344638 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344653 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344664 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344678 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344693 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344708 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344725 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344742 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344760 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344771 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344785 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344798 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344810 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344831 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344842 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344853 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344865 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344877 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344888 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344901 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344920 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344937 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344949 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344959 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344975 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344985 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.344997 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345007 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345020 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345032 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345043 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345056 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345068 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345080 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345092 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345103 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345116 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345127 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345140 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345166 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345180 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345192 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345205 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345217 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345229 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345240 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345256 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345269 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345291 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345304 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345316 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345336 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345347 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345359 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345372 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345386 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345403 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345415 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345428 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345442 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345458 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345469 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345485 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345497 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345509 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345521 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345535 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345548 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345562 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345580 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345594 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345607 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345626 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345638 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345681 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345694 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345705 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345717 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345729 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345740 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.345757 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349053 4982 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349257 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349312 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349360 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349394 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349426 4982 reconstruct.go:130] "Volume is marked as uncertain
and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349459 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349500 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349531 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349564 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349598 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349628 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349657 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349689 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349733 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349767 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349799 4982 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349828 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349856 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349884 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349914 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349943 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.349975 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350012 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350039 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350070 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350096 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350129 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350193 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350227 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350258 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350288 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350320 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350350 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350380 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350415 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350445 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350470 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350498 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350560 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350587 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350616 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350643 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350683 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350735 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350762 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350791 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350867 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350915 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350944 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350970 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.350996 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351025 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351072 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351103 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351184 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351216 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351244 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351270 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351320 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351395 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351426 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.351456 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352487 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352648 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352694 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352724 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352753 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352781 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352808 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352837 4982 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352945 4982 reconstruct.go:97] "Volume reconstruction finished" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.352966 4982 reconciler.go:26] 
"Reconciler: start to sync state" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.361834 4982 manager.go:324] Recovery completed Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.376485 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.379381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.379437 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.379465 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.385943 4982 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.386063 4982 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.386164 4982 state_mem.go:36] "Initialized new in-memory state store" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.387207 4982 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.388880 4982 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.388945 4982 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.388985 4982 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.389064 4982 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.392649 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.392729 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.394720 4982 policy_none.go:49] "None policy: Start" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.395687 4982 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.395722 4982 state_mem.go:35] "Initializing new in-memory state store" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.432778 4982 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.456653 4982 manager.go:334] "Starting Device Plugin manager" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.456762 4982 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 19:13:41 
crc kubenswrapper[4982]: I1205 19:13:41.456776 4982 server.go:79] "Starting device plugin registration server" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.457230 4982 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.457246 4982 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.457587 4982 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.457734 4982 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.457750 4982 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.468958 4982 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.489221 4982 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.489334 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.490574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.490614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.490626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.490785 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491242 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491318 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491573 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491683 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.491934 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492001 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492599 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492664 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492677 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.492965 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.493201 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.493272 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.493517 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.493544 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.493556 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494229 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494250 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494258 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494400 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 
19:13:41.494704 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.494783 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495080 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495338 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495375 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.495837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.496253 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.496286 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.496299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.537861 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="400ms" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.557814 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.557848 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.557915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.557968 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558009 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558051 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558232 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558321 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558442 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558459 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.558492 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.559765 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.559830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.559848 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.559894 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.560631 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.94:6443: connect: connection refused" node="crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660087 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660211 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660302 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660356 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660859 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660932 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660944 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660988 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661027 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661061 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod 
\"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661075 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661314 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661352 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661364 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661412 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661417 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661505 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.660927 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661569 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661512 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661616 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661628 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661536 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.661767 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.663904 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.761603 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.765045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.765099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.765111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.765140 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.765416 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.94:6443: connect: connection refused" node="crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.839038 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.848679 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.874486 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.892854 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-887da9b430477dcb82722fce64d668fc4d5cb2d427bc8aa579cce31fa5f22d9d WatchSource:0}: Error finding container 887da9b430477dcb82722fce64d668fc4d5cb2d427bc8aa579cce31fa5f22d9d: Status 404 returned error can't find the container with id 887da9b430477dcb82722fce64d668fc4d5cb2d427bc8aa579cce31fa5f22d9d Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.894699 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-859e0fb599b70e428413dc2ce64d37ca1dbe6d2b2adad8f5e8f7c849227336dd WatchSource:0}: Error finding container 859e0fb599b70e428413dc2ce64d37ca1dbe6d2b2adad8f5e8f7c849227336dd: Status 404 returned error can't find the container with id 859e0fb599b70e428413dc2ce64d37ca1dbe6d2b2adad8f5e8f7c849227336dd Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.902685 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-a65a24ec77039a9e3204e431fefea0c07ff445e6fe54f9e52408b8d0b2f6413b WatchSource:0}: Error finding container a65a24ec77039a9e3204e431fefea0c07ff445e6fe54f9e52408b8d0b2f6413b: Status 404 returned error can't find the container with id a65a24ec77039a9e3204e431fefea0c07ff445e6fe54f9e52408b8d0b2f6413b Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.904288 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: I1205 19:13:41.913837 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.929864 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-86d173e385938f67c3b8181bda91f15acb48c546c999d8e19e1a40d87a018a27 WatchSource:0}: Error finding container 86d173e385938f67c3b8181bda91f15acb48c546c999d8e19e1a40d87a018a27: Status 404 returned error can't find the container with id 86d173e385938f67c3b8181bda91f15acb48c546c999d8e19e1a40d87a018a27 Dec 05 19:13:41 crc kubenswrapper[4982]: E1205 19:13:41.940025 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="800ms" Dec 05 19:13:41 crc kubenswrapper[4982]: W1205 19:13:41.942356 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-d0c4b688624dcc9c921fa68c49003a0d778d047f1fcdf728595f8533dcbdc40e WatchSource:0}: Error finding container d0c4b688624dcc9c921fa68c49003a0d778d047f1fcdf728595f8533dcbdc40e: Status 404 returned error can't find the container with id d0c4b688624dcc9c921fa68c49003a0d778d047f1fcdf728595f8533dcbdc40e Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.165635 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.167480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.167536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.167554 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.167591 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.168312 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.94:6443: connect: connection refused" node="crc" Dec 05 19:13:42 crc kubenswrapper[4982]: W1205 19:13:42.204056 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.204181 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.328926 4982 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 
38.102.83.94:6443: connect: connection refused Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.336217 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 15:10:48.536919808 +0000 UTC Dec 05 19:13:42 crc kubenswrapper[4982]: W1205 19:13:42.384296 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.384418 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.397438 4982 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967" exitCode=0 Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.397554 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.397720 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a65a24ec77039a9e3204e431fefea0c07ff445e6fe54f9e52408b8d0b2f6413b"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.397875 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.399526 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.399560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.399571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.400473 4982 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="d0f52f5535022ad363e090d1f9e0015b8dd80610a7e792b4b81ee07aa2dccd74" exitCode=0 Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.400549 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"d0f52f5535022ad363e090d1f9e0015b8dd80610a7e792b4b81ee07aa2dccd74"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.400586 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"887da9b430477dcb82722fce64d668fc4d5cb2d427bc8aa579cce31fa5f22d9d"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.400781 4982 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.401990 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402762 4982 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b" exitCode=0 Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402808 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402829 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"859e0fb599b70e428413dc2ce64d37ca1dbe6d2b2adad8f5e8f7c849227336dd"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.402902 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.404200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.404241 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.404258 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.405869 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.405923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d0c4b688624dcc9c921fa68c49003a0d778d047f1fcdf728595f8533dcbdc40e"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.407171 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d" exitCode=0 Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.407202 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.407221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"86d173e385938f67c3b8181bda91f15acb48c546c999d8e19e1a40d87a018a27"} Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.407320 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.408250 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.408278 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.408289 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.410352 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.412949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.413010 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.413023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: W1205 19:13:42.730639 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.730771 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError" Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.740943 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="1.6s" Dec 05 19:13:42 crc kubenswrapper[4982]: W1205 19:13:42.885862 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.94:6443: connect: connection refused Dec 05 19:13:42 crc kubenswrapper[4982]: E1205 19:13:42.885981 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.94:6443: connect: connection refused" logger="UnhandledError" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.968408 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:42 crc 
kubenswrapper[4982]: I1205 19:13:42.970248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.970340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.970358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:42 crc kubenswrapper[4982]: I1205 19:13:42.970421 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.252679 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.337383 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:44:20.236879696 +0000 UTC Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.337459 4982 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 574h30m36.899422732s for next certificate rotation Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.413742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.413792 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.413803 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.413812 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.415645 4982 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4" exitCode=0 Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.415676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.416033 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.417752 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"debf150e35afe2a1027eaa61b2b2730a48faaad43f1bcf3537cad511c9d4b14e"} Dec 05 19:13:43 crc 
kubenswrapper[4982]: I1205 19:13:43.417903 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418759 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418769 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418809 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418824 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.418832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.431747 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.431828 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.431842 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.431963 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.433119 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.433219 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.433232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.435670 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.435793 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 
19:13:43.435851 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409"} Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.435865 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.436862 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.436904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.436918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.788230 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:43 crc kubenswrapper[4982]: I1205 19:13:43.793497 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.444505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646"} Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.444731 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.447238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.447340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.447405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.448902 4982 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf" exitCode=0 Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.449096 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.449347 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf"} Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.449560 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450746 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450825 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450855 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.450938 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.874949 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.875256 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.876832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.876960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:44 crc kubenswrapper[4982]: I1205 19:13:44.876988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.456588 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f"} Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.456676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367"} Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.456697 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643"} Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.456633 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.457128 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.457145 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.457246 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460895 
4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460928 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:45 crc kubenswrapper[4982]: I1205 19:13:45.460966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.082653 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.465949 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76"} Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.466024 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.466036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3"} Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.466192 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.466288 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467553 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467719 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.467926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.468001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.468028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:46 crc kubenswrapper[4982]: I1205 19:13:46.969136 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.350737 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.468810 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.468814 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470088 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470763 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.470791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:47 crc kubenswrapper[4982]: I1205 19:13:47.969242 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.059674 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.059966 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.061871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.061958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.061986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.471605 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.471699 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.473019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.473105 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.473133 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:48 crc 
kubenswrapper[4982]: I1205 19:13:48.473555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.473610 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:48 crc kubenswrapper[4982]: I1205 19:13:48.473627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:51 crc kubenswrapper[4982]: E1205 19:13:51.469118 4982 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 19:13:52 crc kubenswrapper[4982]: E1205 19:13:52.971981 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 05 19:13:53 crc kubenswrapper[4982]: E1205 19:13:53.254392 4982 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.269070 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.269501 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.272072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.272135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.272220 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.277411 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.329328 4982 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.486080 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.487879 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.487942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.487961 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.900688 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.901014 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.904076 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.904138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:53 crc kubenswrapper[4982]: I1205 19:13:53.904182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:54 crc kubenswrapper[4982]: E1205 19:13:54.342677 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 05 19:13:54 crc kubenswrapper[4982]: W1205 19:13:54.497021 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.497243 4982 trace.go:236] Trace[93583255]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 19:13:44.495) (total time: 10001ms): Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[93583255]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:13:54.496) Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[93583255]: [10.001946177s] [10.001946177s] END Dec 05 19:13:54 crc kubenswrapper[4982]: E1205 19:13:54.497296 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 19:13:54 crc kubenswrapper[4982]: W1205 19:13:54.550879 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.551065 4982 trace.go:236] Trace[741056167]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 19:13:44.548) (total time: 10002ms): Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[741056167]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (19:13:54.550) Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[741056167]: [10.002219464s] [10.002219464s] END Dec 05 19:13:54 crc kubenswrapper[4982]: E1205 19:13:54.551116 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS 
handshake timeout" logger="UnhandledError" Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.572760 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.574600 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.574659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.574668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.574750 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:13:54 crc kubenswrapper[4982]: W1205 19:13:54.892533 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 19:13:54 crc kubenswrapper[4982]: I1205 19:13:54.892672 4982 trace.go:236] Trace[1863516890]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 19:13:44.890) (total time: 10002ms): Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[1863516890]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (19:13:54.892) Dec 05 19:13:54 crc kubenswrapper[4982]: Trace[1863516890]: [10.002079726s] [10.002079726s] END Dec 05 19:13:54 crc kubenswrapper[4982]: E1205 19:13:54.892702 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 19:13:55 crc kubenswrapper[4982]: W1205 19:13:55.040033 4982 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 19:13:55 crc kubenswrapper[4982]: I1205 19:13:55.040206 4982 trace.go:236] Trace[1342722537]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 19:13:45.037) (total time: 10002ms): Dec 05 19:13:55 crc kubenswrapper[4982]: Trace[1342722537]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (19:13:55.040) Dec 05 19:13:55 crc kubenswrapper[4982]: Trace[1342722537]: [10.002178698s] [10.002178698s] END Dec 05 19:13:55 crc kubenswrapper[4982]: E1205 19:13:55.040236 4982 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 19:13:56 crc kubenswrapper[4982]: I1205 19:13:56.269587 4982 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller 
namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 19:13:56 crc kubenswrapper[4982]: I1205 19:13:56.269719 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 19:13:57 crc kubenswrapper[4982]: I1205 19:13:57.451413 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 19:13:57 crc kubenswrapper[4982]: I1205 19:13:57.970275 4982 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 19:13:57 crc kubenswrapper[4982]: I1205 19:13:57.970410 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 19:13:59 crc kubenswrapper[4982]: I1205 19:13:59.066339 4982 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 19:13:59 crc kubenswrapper[4982]: I1205 19:13:59.066439 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 19:14:01 crc kubenswrapper[4982]: I1205 19:14:01.184365 4982 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 19:14:01 crc kubenswrapper[4982]: E1205 19:14:01.469360 4982 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 19:14:02.977786 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 19:14:02.978049 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 19:14:02.979830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 19:14:02.979908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 
19:14:02.979934 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:02 crc kubenswrapper[4982]: I1205 19:14:02.985273 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.516623 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.516690 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.517646 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.517713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.517727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.933406 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.933665 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.935004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.935053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.935071 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:03 crc kubenswrapper[4982]: I1205 19:14:03.949988 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.054552 4982 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.054627 4982 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.059489 4982 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.067473 4982 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.120723 4982 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60504->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.120801 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" 
probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:60504->192.168.126.11:17697: read: connection reset by peer" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.120811 4982 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:46574->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.120898 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:46574->192.168.126.11:17697: read: connection reset by peer" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.121415 4982 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.121497 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.168930 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.172852 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.332694 4982 apiserver.go:52] "Watching apiserver" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.335042 4982 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.335496 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.336035 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.336189 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.336369 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.336375 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.336562 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.337693 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.338280 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.339458 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.339864 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.339944 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.340331 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.342430 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.342438 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.343266 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.343404 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.343481 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.343772 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.343862 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.374699 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.387619 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.400230 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.412436 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.423742 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.432310 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.432747 4982 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.442403 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.452769 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457425 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457458 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457482 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 19:14:04 crc kubenswrapper[4982]: 
I1205 19:14:04.457508 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457537 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457576 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457594 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457612 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457630 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457648 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457666 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457686 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457703 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457747 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457764 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457781 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457804 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457791 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457846 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457978 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.457999 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458015 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458033 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458053 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458074 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458068 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458095 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458113 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458130 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458164 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458231 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458250 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458274 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458294 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458312 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458350 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458370 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458390 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458411 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458426 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458444 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458492 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458511 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458530 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458547 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458563 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458588 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458609 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458642 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458660 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458676 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458693 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458710 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458729 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458747 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458766 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458784 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458803 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458819 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458836 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458853 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458872 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458890 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458906 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458923 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458943 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458963 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458980 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458997 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459018 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459037 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459053 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459087 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459104 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459122 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459136 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459170 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459188 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459203 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459220 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458246 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460497 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458281 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458423 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458538 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458581 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458576 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458750 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458768 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458922 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.458967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459125 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459139 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459220 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459231 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.459254 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 19:14:04.959231992 +0000 UTC m=+23.841117987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459383 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459461 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459488 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459498 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459600 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459708 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.459746 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460007 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460023 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460043 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460106 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460265 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460276 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460319 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460758 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460801 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460830 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460855 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: 
\"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460900 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460917 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460933 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460949 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460965 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460981 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460999 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461015 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461033 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461049 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: 
\"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461064 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461083 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461102 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461119 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461139 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461289 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462410 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462427 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462446 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462486 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462508 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462530 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462551 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462570 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462594 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462612 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462632 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462651 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462676 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: 
\"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462697 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462719 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462739 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462756 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462776 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462797 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462815 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462832 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462851 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462870 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462890 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462910 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462934 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462965 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462990 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463019 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463045 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463071 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463096 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463120 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463695 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463718 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463740 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463760 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463779 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463800 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463858 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463882 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463902 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463922 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463948 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463968 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463986 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464011 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464037 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464058 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464079 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464110 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464139 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464182 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464202 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464221 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464240 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464259 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464277 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464295 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464318 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464354 4982 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464377 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464399 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464418 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464436 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464453 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464472 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464494 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464514 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464552 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464573 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464611 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464631 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464701 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464725 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464745 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464767 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464788 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: 
\"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464807 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464827 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464846 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464866 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464887 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464906 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464924 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464945 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464962 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: 
\"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465043 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465098 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465121 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465157 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465183 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465209 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465230 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465276 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465298 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465600 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465747 4982 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465773 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465792 4982 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465815 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465834 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath 
\"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465849 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465959 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465977 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465992 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466007 4982 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466020 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466033 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466045 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466059 4982 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466071 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466084 4982 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466096 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466108 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: 
\"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466120 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466133 4982 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466170 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466184 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466197 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466209 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466220 4982 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466270 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468842 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468878 4982 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468893 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468906 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468920 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468945 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468956 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468969 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468982 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469000 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469015 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469083 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472919 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.474759 4982 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461284 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461659 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460339 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460376 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460472 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461686 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.461991 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462375 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462565 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462646 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462800 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462868 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462831 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.462915 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463560 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463635 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463802 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463852 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463918 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.463944 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464384 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464174 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464433 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464643 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464664 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464700 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464827 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465134 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.486526 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.483949 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464762 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465395 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.465412 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.460322 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465542 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.464237 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.465950 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.466387 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467102 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467342 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467430 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467571 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467587 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467734 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467769 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467841 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.467849 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.468198 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469300 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469369 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469635 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469641 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469703 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469862 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.469879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.470047 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.470413 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.470449 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471024 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471083 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471506 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471715 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471856 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471977 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.471986 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472324 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472454 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472697 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.473747 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.473831 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.473994 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.472773 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.474224 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.474476 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.474899 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.475414 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.475453 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.477428 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.477771 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478023 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478037 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478362 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478321 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478374 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.478735 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.479042 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.479276 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.479315 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.480658 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.480967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.481378 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.481458 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.481581 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.480992 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.481679 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.481908 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.482321 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.482598 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.482803 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.482917 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.483200 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.483223 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.483630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.484105 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.484671 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.484747 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.484791 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.484944 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.485640 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.485951 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.486197 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.488817 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:04.986981749 +0000 UTC m=+23.868867754 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.489205 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.489241 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.489361 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.489387 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.489286 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:04.98893524 +0000 UTC m=+23.870821425 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.489600 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:04.989571846 +0000 UTC m=+23.871458001 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.490254 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.490960 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.490998 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.491017 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.491087 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:04.991068596 +0000 UTC m=+23.872954761 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.491755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.494356 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.495398 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.495646 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.495653 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.496415 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.496557 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.496877 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.497261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.497287 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.497438 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.497492 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.497503 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.498912 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499195 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499238 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499282 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499356 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499424 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499422 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.499866 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.500110 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.500436 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.501452 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.501540 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.501707 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.501936 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.502041 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.502555 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.502649 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.502840 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.502733 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503074 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503298 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503499 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503520 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503769 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.503908 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.504078 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.505580 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.521111 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.523617 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.523610 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.526411 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646" exitCode=255 Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.526671 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646"} Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.532871 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.535869 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.536663 4982 scope.go:117] "RemoveContainer" containerID="881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.537561 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.539736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.540408 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.540598 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.552527 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.563331 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569603 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569702 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569717 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569727 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569738 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569748 4982 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 
crc kubenswrapper[4982]: I1205 19:14:04.569756 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569766 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569775 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569783 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569792 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569801 4982 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569809 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569817 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569826 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569835 4982 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569844 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569853 4982 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569862 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569870 
4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569879 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569888 4982 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569897 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569907 4982 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569917 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569928 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569939 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569950 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569959 4982 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569969 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569978 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569987 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.569996 4982 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570007 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570016 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570025 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570034 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570042 4982 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570051 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570060 4982 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570070 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570079 4982 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570088 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570097 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570106 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570114 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: 
\"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570124 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570132 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570141 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570167 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570178 4982 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570189 4982 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570198 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570208 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570217 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570226 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570235 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570245 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570254 4982 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570271 4982 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570284 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570297 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570311 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570319 4982 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570329 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570338 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570348 4982 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570356 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570365 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570373 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570382 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570391 4982 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570400 4982 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570408 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570444 4982 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570453 4982 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570462 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570471 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570480 4982 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570489 4982 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570498 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570504 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570508 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570566 4982 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 
19:14:04.570580 4982 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570594 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570609 4982 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570622 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570638 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570650 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570663 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570676 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570688 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570701 4982 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570713 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570752 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570779 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570823 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570837 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570849 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570860 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570917 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570929 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570959 4982 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570972 4982 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.570984 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571017 4982 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571029 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571041 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc 
kubenswrapper[4982]: I1205 19:14:04.571053 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571067 4982 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571080 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571095 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571107 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571123 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571136 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571166 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571180 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571193 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571213 4982 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571226 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571239 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571252 4982 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571264 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571276 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571289 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571301 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571313 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571324 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571336 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571350 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571367 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571386 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571405 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571455 4982 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571472 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571485 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571535 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571551 4982 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571563 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571577 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571590 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571603 4982 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571617 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571629 4982 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571641 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571653 4982 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571665 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571796 4982 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571809 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571822 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571840 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571852 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571865 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571878 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571890 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571902 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571916 4982 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.571927 4982 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.573235 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.598549 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.633518 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.638337 4982 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.655815 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.661691 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.664913 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.672958 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 19:14:04 crc kubenswrapper[4982]: W1205 19:14:04.683960 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-790eeec8e176faaccb663e8b01f24028fead7843df9410fa5356b8e15fcf2070 WatchSource:0}: Error finding container 790eeec8e176faaccb663e8b01f24028fead7843df9410fa5356b8e15fcf2070: Status 404 returned error can't find the container with id 790eeec8e176faaccb663e8b01f24028fead7843df9410fa5356b8e15fcf2070 Dec 05 19:14:04 crc kubenswrapper[4982]: W1205 19:14:04.685499 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-b39d55355e87bc36925f1f309b2b4b5e4f7c95340f0683348fb6c89d36a24d00 WatchSource:0}: Error finding container b39d55355e87bc36925f1f309b2b4b5e4f7c95340f0683348fb6c89d36a24d00: Status 404 returned error can't find the container with id b39d55355e87bc36925f1f309b2b4b5e4f7c95340f0683348fb6c89d36a24d00 Dec 05 19:14:04 crc kubenswrapper[4982]: W1205 19:14:04.698330 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-c37f385be723b7ff1523ede38d4939a7f486ccb7ccacd67cf41ad2ff88d0d880 WatchSource:0}: Error finding container c37f385be723b7ff1523ede38d4939a7f486ccb7ccacd67cf41ad2ff88d0d880: Status 404 returned error can't find the container with id c37f385be723b7ff1523ede38d4939a7f486ccb7ccacd67cf41ad2ff88d0d880 Dec 05 19:14:04 crc kubenswrapper[4982]: I1205 19:14:04.974931 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:04 crc kubenswrapper[4982]: E1205 19:14:04.975086 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:14:05.975056967 +0000 UTC m=+24.856942972 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.076438 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.076674 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.077541 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:06.077518839 +0000 UTC m=+24.959404904 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.077641 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.077704 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.077727 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.077810 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:06.077785976 +0000 UTC m=+24.959672001 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.077873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.077955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.078006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078143 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078237 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078254 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078307 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:06.078285479 +0000 UTC m=+24.960171514 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078409 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.078466 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:06.078453204 +0000 UTC m=+24.960339239 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.116949 4982 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.394575 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.395425 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.396202 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.396861 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.397493 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.398077 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.398786 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.399412 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" 
path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.400086 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.400675 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.401277 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.401973 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.402533 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.403108 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.404996 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.405729 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.406646 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.407135 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.407841 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.409249 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.409859 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.410987 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" 
path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.411562 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.412797 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.413316 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.414594 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.415410 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.415944 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.418809 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.419534 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.420721 4982 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.420948 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.423003 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.424321 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.424920 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.430996 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" 
path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.432300 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.433512 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.434367 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.435894 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.436493 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.437617 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.438708 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.439511 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.440422 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.441005 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.442092 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.443006 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.443599 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.444546 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.445139 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.447525 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.448181 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.448703 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.530691 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c37f385be723b7ff1523ede38d4939a7f486ccb7ccacd67cf41ad2ff88d0d880"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.532620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.532693 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.532713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"790eeec8e176faaccb663e8b01f24028fead7843df9410fa5356b8e15fcf2070"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.534887 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.534921 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"b39d55355e87bc36925f1f309b2b4b5e4f7c95340f0683348fb6c89d36a24d00"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.536908 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.538474 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111"} Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.539040 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.560244 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa372326901
9bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\
\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.581709 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.607578 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.630776 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.649540 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mount
Path\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.663314 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.675384 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.687105 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.704639 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.727494 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.741940 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.759172 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.772194 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.788600 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.801363 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.813613 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.830385 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.846186 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:05Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:05 crc kubenswrapper[4982]: I1205 19:14:05.985325 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:05 crc kubenswrapper[4982]: E1205 19:14:05.985464 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:14:07.98544092 +0000 UTC m=+26.867326915 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.086680 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.086723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.086741 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.086763 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086871 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086906 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086921 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086934 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086949 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:06 crc 
kubenswrapper[4982]: E1205 19:14:06.086998 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.087009 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.087018 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.086965 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:08.086940277 +0000 UTC m=+26.968826312 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.087079 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:08.08707078 +0000 UTC m=+26.968956775 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.087091 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:08.087085151 +0000 UTC m=+26.968971146 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.087100 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:08.087096131 +0000 UTC m=+26.968982126 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.390204 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.390260 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:06 crc kubenswrapper[4982]: I1205 19:14:06.390222 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.390584 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.390733 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:06 crc kubenswrapper[4982]: E1205 19:14:06.390856 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.260062 4982 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.262645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.262728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.262747 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.262852 4982 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.276602 4982 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.277280 4982 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.279482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.279513 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.279522 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.279537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.279549 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.303021 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:07Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.307730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.307786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.307798 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.307818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.307833 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.329650 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:07Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.333654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.333693 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.333705 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.333724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.333739 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.347403 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:07Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.351941 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.351972 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.351983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.352000 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.352012 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.365520 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:07Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.370103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.370183 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.370206 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.370228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.370241 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.387251 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:07Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:07 crc kubenswrapper[4982]: E1205 19:14:07.387422 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.389800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
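Every status-update retry above fails the same way: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, months before the current time in the log. A minimal Go sketch (an illustration, not part of this capture; the address is taken from the error text) that reads the presented certificate's validity window from the node:

    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"time"
    )

    func main() {
    	// InsecureSkipVerify is deliberate: we want to inspect the expired
    	// certificate, not trust it.
    	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    	if err != nil {
    		fmt.Println("dial failed:", err)
    		return
    	}
    	defer conn.Close()

    	// The leaf certificate is first in the presented chain.
    	cert := conn.ConnectionState().PeerCertificates[0]
    	fmt.Printf("subject=%s notBefore=%s notAfter=%s expired=%v\n",
    		cert.Subject, cert.NotBefore, cert.NotAfter,
    		time.Now().After(cert.NotAfter))
    }

Run against this node it would be expected to print expired=true with notAfter matching the 2025-08-24T17:21:41Z date in the webhook error.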
event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.389847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.389856 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.389873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.389886 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.497528 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.497611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.497631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.497659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.497679 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.601356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.601440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.601465 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.601495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.601517 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.705331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.705402 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.705420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.705445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.705464 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.808594 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.808656 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.808675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.808700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.808719 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.912467 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.912546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.912571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.912602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:07 crc kubenswrapper[4982]: I1205 19:14:07.912625 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:07Z","lastTransitionTime":"2025-12-05T19:14:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.003775 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.004046 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:14:12.004011546 +0000 UTC m=+30.885897541 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.016242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.016312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.016335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.016371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.016395 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
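The NodeNotReady churn above traces back to one condition: the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/, so it keeps reporting NetworkReady=false until the network plugin (OVN-Kubernetes on this OpenShift node) writes its config. A small Go sketch (an illustration, not from the log; the directory path is the one named in the message) that checks whether that config has appeared yet:

    package main

    import (
    	"fmt"
    	"os"
    )

    func main() {
    	// Path taken verbatim from the kubelet error message.
    	const cniDir = "/etc/kubernetes/cni/net.d/"
    	entries, err := os.ReadDir(cniDir)
    	if err != nil {
    		fmt.Println("cannot read", cniDir, "-", err)
    		return
    	}
    	if len(entries) == 0 {
    		fmt.Println(cniDir, "is empty: the network plugin has not written its config")
    		return
    	}
    	for _, e := range entries {
    		fmt.Println("found CNI config:", e.Name())
    	}
    }

An empty directory here matches the NetworkPluginNotReady messages; once the network provider starts and drops a config file, the Ready condition above would be expected to flip.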
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.105314 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.105386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.105416 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.105443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105541 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105641 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105657 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:12.105631577 +0000 UTC m=+30.987517582 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105668 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105672 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105719 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105686 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105739 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105786 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.105803 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:12.10576532 +0000 UTC m=+30.987651325 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.106003 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:12.105965295 +0000 UTC m=+30.987851330 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.106043 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:12.106029307 +0000 UTC m=+30.987915342 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.120497 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.120600 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.120632 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.120672 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.120698 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.224645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.224691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.224701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.224715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.224726 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.328006 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.328073 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.328093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.328120 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.328141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.389374 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.389376 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.389540 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.389632 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.389885 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:08 crc kubenswrapper[4982]: E1205 19:14:08.390060 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.431756 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.431837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.431859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.431890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.431911 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.536338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.536403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.536419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.536446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.536461 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.551837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.574232 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.595740 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.621806 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.638985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.639027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.639038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.639055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.639067 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.640971 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.660872 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.679368 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.701420 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.717325 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.732087 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:08Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.741583 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.741652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.741666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.741684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.741697 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.844927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.845020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.845043 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.845078 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.845101 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.948009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.948078 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.948092 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.948110 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:08 crc kubenswrapper[4982]: I1205 19:14:08.948120 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:08Z","lastTransitionTime":"2025-12-05T19:14:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.055034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.055143 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.055205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.055256 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.055285 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.158587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.158654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.158672 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.158697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.158716 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.261577 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.261645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.261663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.261689 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.261708 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.364294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.364366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.364379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.364396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.364409 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.467648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.467698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.467716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.467734 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.467746 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.504527 4982 csr.go:261] certificate signing request csr-htgkz is approved, waiting to be issued Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.527754 4982 csr.go:257] certificate signing request csr-htgkz is issued Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.570572 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.570630 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.570643 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.570663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.570677 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.673016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.673058 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.673068 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.673085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.673100 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.776001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.776044 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.776054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.776071 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.776083 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.813522 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-tlc5r"] Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.813892 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tlc5r" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.815754 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.816289 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.816587 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.831468 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.847862 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.861558 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{
\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878428 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878879 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878944 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.878954 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.891841 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.911659 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.921951 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc220b96-80bd-4c1e-99c7-a39446465bfd-hosts-file\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.922000 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdxc7\" (UniqueName: \"kubernetes.io/projected/bc220b96-80bd-4c1e-99c7-a39446465bfd-kube-api-access-cdxc7\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.928910 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.944221 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.959613 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.974206 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:09Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.981354 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.981395 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.981405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.981421 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:09 crc kubenswrapper[4982]: I1205 19:14:09.981433 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:09Z","lastTransitionTime":"2025-12-05T19:14:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.023216 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc220b96-80bd-4c1e-99c7-a39446465bfd-hosts-file\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.023272 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdxc7\" (UniqueName: \"kubernetes.io/projected/bc220b96-80bd-4c1e-99c7-a39446465bfd-kube-api-access-cdxc7\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.023432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/bc220b96-80bd-4c1e-99c7-a39446465bfd-hosts-file\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.057066 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdxc7\" (UniqueName: \"kubernetes.io/projected/bc220b96-80bd-4c1e-99c7-a39446465bfd-kube-api-access-cdxc7\") pod \"node-resolver-tlc5r\" (UID: \"bc220b96-80bd-4c1e-99c7-a39446465bfd\") " pod="openshift-dns/node-resolver-tlc5r"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.084621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.084667 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.084679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.084695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.084707 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.126175 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-tlc5r"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.189821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.189870 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.189882 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.189905 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.189918 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.218722 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8ldph"]
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.219100 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8ldph"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.223308 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.227653 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.227665 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.227703 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.227718 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.242635 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xlt6h"]
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.245285 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-dldj9"]
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.245521 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-7q67q"]
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.246113 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7q67q"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.246470 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.246799 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.251827 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.251975 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252116 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252217 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252287 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252364 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252418 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252572 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252702 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.252852 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.253034 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.253187 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.255480 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.255706 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.260579 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.281919 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.296960 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.297369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.297400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.297413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.297430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.297443 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.313936 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328245 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328316 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328333 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328350 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328365 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328440 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-system-cni-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328478 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328547 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbkcn\" (UniqueName: \"kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328589 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-socket-dir-parent\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328617 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-multus\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328637 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-daemon-config\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-hostroot\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328686 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-os-release\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 
19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328711 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-conf-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-etc-kubernetes\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328759 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-k8s-cni-cncf-io\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328781 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztpvh\" (UniqueName: \"kubernetes.io/projected/6902f814-e220-41f2-887a-1831b28c14ee-kube-api-access-ztpvh\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328858 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328884 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328906 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328927 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd\") pod 
\"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328965 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-kubelet\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.328904 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329130 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329210 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329240 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-os-release\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329261 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-multus-certs\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329316 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2rt4\" (UniqueName: \"kubernetes.io/projected/62e7c34f-d411-481e-a5bb-885e7cbd4326-kube-api-access-v2rt4\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329442 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6902f814-e220-41f2-887a-1831b28c14ee-proxy-tls\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329498 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329556 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-system-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329574 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cnibin\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329593 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329625 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-cni-binary-copy\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329702 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-bin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329719 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6902f814-e220-41f2-887a-1831b28c14ee-rootfs\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329739 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329756 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-netns\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: 
I1205 19:14:10.329773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6902f814-e220-41f2-887a-1831b28c14ee-mcd-auth-proxy-config\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329830 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-59mx6\" (UniqueName: \"kubernetes.io/projected/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-kube-api-access-59mx6\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329852 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329876 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.329907 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-cnibin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.343744 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.357402 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.379484 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.389734 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.389749 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.389789 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:10 crc kubenswrapper[4982]: E1205 19:14:10.390382 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:10 crc kubenswrapper[4982]: E1205 19:14:10.390541 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:10 crc kubenswrapper[4982]: E1205 19:14:10.390774 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.394978 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod
-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.400069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.400119 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.400131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.400169 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 
19:14:10.400185 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.409337 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.423295 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.430894 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-os-release\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.430954 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-conf-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.430993 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-etc-kubernetes\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " 
pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431030 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-k8s-cni-cncf-io\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztpvh\" (UniqueName: \"kubernetes.io/projected/6902f814-e220-41f2-887a-1831b28c14ee-kube-api-access-ztpvh\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431087 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-conf-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431115 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-etc-kubernetes\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-os-release\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431120 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-k8s-cni-cncf-io\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431098 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431266 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc 
kubenswrapper[4982]: I1205 19:14:10.431333 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431411 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-kubelet\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431440 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431472 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-os-release\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431535 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431539 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-multus-certs\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431444 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431511 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431570 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-os-release\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431509 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-kubelet\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-multus-certs\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431583 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2rt4\" (UniqueName: \"kubernetes.io/projected/62e7c34f-d411-481e-a5bb-885e7cbd4326-kube-api-access-v2rt4\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431666 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6902f814-e220-41f2-887a-1831b28c14ee-proxy-tls\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431687 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431707 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-system-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431730 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cnibin\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431748 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-cni-binary-copy\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-bin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6902f814-e220-41f2-887a-1831b28c14ee-rootfs\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431889 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-netns\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431904 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6902f814-e220-41f2-887a-1831b28c14ee-mcd-auth-proxy-config\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431919 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431950 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-59mx6\" (UniqueName: \"kubernetes.io/projected/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-kube-api-access-59mx6\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431968 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.431985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432001 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-cnibin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432041 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" 
Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432068 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432082 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432094 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432099 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432121 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432161 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-run-netns\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-system-cni-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432194 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432229 4982 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbkcn\" (UniqueName: \"kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432271 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-socket-dir-parent\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432291 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-multus\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432312 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-daemon-config\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432332 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-hostroot\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-hostroot\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432380 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432426 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-bin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432431 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/6902f814-e220-41f2-887a-1831b28c14ee-rootfs\") pod \"machine-config-daemon-dldj9\" (UID: 
\"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432699 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6902f814-e220-41f2-887a-1831b28c14ee-mcd-auth-proxy-config\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-socket-dir-parent\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432907 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-host-var-lib-cni-multus\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432939 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-system-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432997 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.432451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-system-cni-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433110 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cnibin\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 
19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433227 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433244 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-cni-binary-copy\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433343 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433428 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433484 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-cnibin\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433520 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-cni-dir\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433899 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" 
(UniqueName: \"kubernetes.io/host-path/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-tuning-conf-dir\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433940 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-cni-binary-copy\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433950 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.433904 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/62e7c34f-d411-481e-a5bb-885e7cbd4326-multus-daemon-config\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.436348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.437167 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6902f814-e220-41f2-887a-1831b28c14ee-proxy-tls\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.450009 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbkcn\" (UniqueName: \"kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn\") pod \"ovnkube-node-xlt6h\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.452464 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztpvh\" (UniqueName: \"kubernetes.io/projected/6902f814-e220-41f2-887a-1831b28c14ee-kube-api-access-ztpvh\") pod \"machine-config-daemon-dldj9\" (UID: \"6902f814-e220-41f2-887a-1831b28c14ee\") " pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.452540 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-59mx6\" (UniqueName: \"kubernetes.io/projected/c16040d8-4ec1-41d8-8462-03c5d07aa6b9-kube-api-access-59mx6\") pod \"multus-additional-cni-plugins-7q67q\" (UID: \"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\") " pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.453302 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.455756 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2rt4\" (UniqueName: \"kubernetes.io/projected/62e7c34f-d411-481e-a5bb-885e7cbd4326-kube-api-access-v2rt4\") pod \"multus-8ldph\" (UID: \"62e7c34f-d411-481e-a5bb-885e7cbd4326\") " pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.468329 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.482027 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.503029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.503091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.503107 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.503130 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.503165 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.506327 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.521215 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.529627 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-05 19:09:09 +0000 UTC, rotation deadline is 2026-09-22 00:59:08.326993651 +0000 UTC Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.529694 4982 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6965h44m57.79730258s for next certificate rotation Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.538869 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.548927 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-8ldph" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.554984 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.563575 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tlc5r" event={"ID":"bc220b96-80bd-4c1e-99c7-a39446465bfd","Type":"ContainerStarted","Data":"0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.563709 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-tlc5r" event={"ID":"bc220b96-80bd-4c1e-99c7-a39446465bfd","Type":"ContainerStarted","Data":"2a42bffe7cb561327a26856ced8079e8bf6874dc4c5200b53655ab98e0c4d6b0"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.574058 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-7q67q" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.574199 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-pl
ugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a7
14c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.584934 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:10 crc kubenswrapper[4982]: W1205 19:14:10.589879 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc16040d8_4ec1_41d8_8462_03c5d07aa6b9.slice/crio-707489c2ef9df63392c36a0ea0782b08fd270f342efe02f9612df504c6382e22 WatchSource:0}: Error finding container 707489c2ef9df63392c36a0ea0782b08fd270f342efe02f9612df504c6382e22: Status 404 returned error can't find the container with id 707489c2ef9df63392c36a0ea0782b08fd270f342efe02f9612df504c6382e22 Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.592742 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.595597 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05
T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7
fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.607196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.607228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.607237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.607254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.607265 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: W1205 19:14:10.609180 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6902f814_e220_41f2_887a_1831b28c14ee.slice/crio-10be8f3b490f28f3945666ca583add86d4e1d82b481d15eb89ff18f955b995a6 WatchSource:0}: Error finding container 10be8f3b490f28f3945666ca583add86d4e1d82b481d15eb89ff18f955b995a6: Status 404 returned error can't find the container with id 10be8f3b490f28f3945666ca583add86d4e1d82b481d15eb89ff18f955b995a6 Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.609288 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: W1205 19:14:10.609878 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod511e6b4b_3bb0_4288_9e2d_2d21485ef74c.slice/crio-4279575fcbf4c051e9e722e9f79ed5d62fb0f1c0c207ff62518ac7c3cc8f1417 WatchSource:0}: Error finding container 4279575fcbf4c051e9e722e9f79ed5d62fb0f1c0c207ff62518ac7c3cc8f1417: Status 404 returned error can't find the container with id 4279575fcbf4c051e9e722e9f79ed5d62fb0f1c0c207ff62518ac7c3cc8f1417 Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.624812 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.638233 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.650677 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.665362 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.681969 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.698022 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.712093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.712165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.712178 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.712197 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.712209 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.713851 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.732521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.746655 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.758132 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.775390 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.795974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e
7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814473 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814646 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.814679 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.829091 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.844826 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.858464 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.871716 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.882753 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:10Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.918187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.918228 4982 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.918237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.918251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:10 crc kubenswrapper[4982]: I1205 19:14:10.918322 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:10Z","lastTransitionTime":"2025-12-05T19:14:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.021316 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.021357 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.021367 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.021389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.021402 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.132101 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.132349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.132364 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.132384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.132398 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.235389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.235455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.235473 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.235501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.235546 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.237529 4982 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.237767 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238129 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238172 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238140 4982 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238272 4982 reflector.go:484] object-"openshift-multus"/"default-cni-sysctl-allowlist": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"default-cni-sysctl-allowlist": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238328 4982 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238358 4982 reflector.go:484] 
object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238629 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238664 4982 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.238944 4982 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.239027 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.239080 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.239122 4982 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: W1205 19:14:11.239444 4982 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-config": Unexpected watch close - watch lasted less than a second and no items received Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.338465 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.338510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.338520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.338537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.338546 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.403549 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.417632 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready
\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.438063 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.440788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.440830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.440839 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.440857 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.440869 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.465567 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.509570 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",
\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.533163 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.543929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.543973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.543983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.544001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.544014 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.548276 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.566238 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.572270 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3" exitCode=0 Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.572354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.572393 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"4279575fcbf4c051e9e722e9f79ed5d62fb0f1c0c207ff62518ac7c3cc8f1417"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.575202 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.575265 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.575276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"10be8f3b490f28f3945666ca583add86d4e1d82b481d15eb89ff18f955b995a6"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.580752 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.585067 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4" exitCode=0 Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.585176 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.585241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerStarted","Data":"707489c2ef9df63392c36a0ea0782b08fd270f342efe02f9612df504c6382e22"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.587056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerStarted","Data":"d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.587600 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerStarted","Data":"0b80debac15e2eaad992b0bcfda22737ad1f99c4787b8dab4b7e6a3a07a80291"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.595165 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.609744 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.624357 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.642080 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e
7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.648085 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.648138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.648171 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.648193 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.648245 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.655055 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.672248 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.697907 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.712622 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.731475 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.746589 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.753511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.753556 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.753582 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.753605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.753617 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.764304 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc 
kubenswrapper[4982]: I1205 19:14:11.782066 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.798960 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8
b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.813536 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.828423 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.840481 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.857285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.857584 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-d
ir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 
19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.857918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.858047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.858068 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.858081 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.871895 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.886567 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.960725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.960770 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.960778 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.960793 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:11 crc kubenswrapper[4982]: I1205 19:14:11.960803 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:11Z","lastTransitionTime":"2025-12-05T19:14:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.047560 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.047667 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:14:20.047645521 +0000 UTC m=+38.929531526 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.065303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.065352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.065365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.065388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.065400 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.133248 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.149571 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.149650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.149691 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.149733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149826 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149861 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149875 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149913 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149940 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:20.149920839 +0000 UTC m=+39.031806834 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149944 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149963 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.149987 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.150001 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:20.149990141 +0000 UTC m=+39.031876136 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.150057 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:20.150036472 +0000 UTC m=+39.031922477 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.150127 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.150192 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:20.150174506 +0000 UTC m=+39.032060521 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.169022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.169070 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.169081 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.169101 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.169118 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.272406 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.272953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.272995 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.273016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.273037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.273050 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.278304 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.373437 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.375117 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.375173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.375186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.375205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.375217 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.389486 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.389506 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.389631 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.389744 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.389881 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:12 crc kubenswrapper[4982]: E1205 19:14:12.390071 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.444076 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-hbm8m"] Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.444677 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.446944 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.447014 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.447021 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.447533 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.459783 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"
},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.471597 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.477872 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.477919 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.477932 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.477951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.477971 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.483813 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.497984 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.510088 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.510805 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.524489 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.537507 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.550324 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.555747 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr96s\" (UniqueName: \"kubernetes.io/projected/363b9582-fba3-4516-b66d-4623a292a882-kube-api-access-tr96s\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.555974 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363b9582-fba3-4516-b66d-4623a292a882-host\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.556120 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363b9582-fba3-4516-b66d-4623a292a882-serviceca\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.576930 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.578243 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.580379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.580420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.580432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.580449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.580461 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.592427 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.593491 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.593922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.594059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.594185 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" 
event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.594305 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.594425 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.594516 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.596402 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25" exitCode=0 Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.596485 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.604578 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.606218 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.617789 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.626117 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.630560 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.644189 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with 
incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\
",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.656320 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.656735 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363b9582-fba3-4516-b66d-4623a292a882-host\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.656819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363b9582-fba3-4516-b66d-4623a292a882-serviceca\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.656853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/363b9582-fba3-4516-b66d-4623a292a882-host\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.656904 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr96s\" (UniqueName: \"kubernetes.io/projected/363b9582-fba3-4516-b66d-4623a292a882-kube-api-access-tr96s\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.658313 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/363b9582-fba3-4516-b66d-4623a292a882-serviceca\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.667350 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.676424 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr96s\" (UniqueName: \"kubernetes.io/projected/363b9582-fba3-4516-b66d-4623a292a882-kube-api-access-tr96s\") pod \"node-ca-hbm8m\" (UID: \"363b9582-fba3-4516-b66d-4623a292a882\") " pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.679078 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.683943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.683974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.684001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.684018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.684031 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.692821 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.693455 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.706014 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.721586 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.723839 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io
\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.736769 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.749037 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.762761 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.765558 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-hbm8m" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.777370 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.785866 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 
crc kubenswrapper[4982]: I1205 19:14:12.785904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.785913 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.785931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.785944 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.795132 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z 
is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.808240 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.828322 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.855931 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.875849 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.890346 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.890408 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.890418 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.890439 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc 
kubenswrapper[4982]: I1205 19:14:12.890451 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.906869 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.960363 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageI
D\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.985892 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:12Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.992788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.992833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.992846 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.992864 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:12 crc kubenswrapper[4982]: I1205 19:14:12.992877 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:12Z","lastTransitionTime":"2025-12-05T19:14:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.027596 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.096234 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.096281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.096293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.096314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.096328 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.199365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.199456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.199474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.199505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.199524 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.302091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.302138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.302166 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.302185 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.302199 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.404273 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.404339 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.404358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.404385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.404406 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.507644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.507733 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.507758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.507790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.507815 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.605037 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a" exitCode=0 Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.605120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.607555 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hbm8m" event={"ID":"363b9582-fba3-4516-b66d-4623a292a882","Type":"ContainerStarted","Data":"516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.607604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-hbm8m" event={"ID":"363b9582-fba3-4516-b66d-4623a292a882","Type":"ContainerStarted","Data":"26a560f5d997b8a6d96f2f3f961d46149b59217a05afc401ae29214583d4806d"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.610695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.610810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.610887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.610962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.610998 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.644280 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.671493 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.691280 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.711491 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.714361 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.714476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.714502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.714534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.714558 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.727342 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc 
kubenswrapper[4982]: I1205 19:14:13.744957 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.761486 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.781015 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.799193 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.818612 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.818668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.818688 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.818716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.818735 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.819767 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.833243 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.849091 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.865406 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.884307 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.907370 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.921760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.921817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.921829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.921847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.921858 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:13Z","lastTransitionTime":"2025-12-05T19:14:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.924293 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.936193 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.954055 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.967853 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.980533 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:13 crc kubenswrapper[4982]: I1205 19:14:13.998374 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:13Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.012935 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.024918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.024976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.024991 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.025012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.025028 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.029835 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.062428 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.074043 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\
" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.090741 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.108823 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
25-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.127961 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.128014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.128024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.128040 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.128053 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.150485 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.194579 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.226691 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.230909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.230962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.230979 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.231002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.231022 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.334907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.334957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.334969 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.334988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.335002 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.389908 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.389942 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.390053 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:14 crc kubenswrapper[4982]: E1205 19:14:14.390044 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:14 crc kubenswrapper[4982]: E1205 19:14:14.390297 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:14 crc kubenswrapper[4982]: E1205 19:14:14.390440 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.438308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.438372 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.438384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.438403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.438419 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.541340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.541374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.541387 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.541404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.541416 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.616804 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.620690 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521" exitCode=0 Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.620762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.645132 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.645237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.645263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.645297 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.645317 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.657503 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf
9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.686738 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e
5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.705098 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.722666 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.738981 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.748131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.748187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.748198 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.748216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.748230 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.753508 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.12
6.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.772941 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.787921 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.803932 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.820495 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.833339 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.844008 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.850381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.850424 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.850435 4982 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.850452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.850464 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.856608 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.867974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.880839 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:14Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.952646 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.952689 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.952698 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.952714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:14 crc kubenswrapper[4982]: I1205 19:14:14.952725 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:14Z","lastTransitionTime":"2025-12-05T19:14:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.055087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.055121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.055134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.055163 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.055177 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.167243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.167298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.167315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.167336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.167353 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.270280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.270334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.270347 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.270366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.270379 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.372680 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.372715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.372725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.372742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.372751 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.476135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.476213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.476226 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.476263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.476277 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.580115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.580184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.580193 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.580209 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.580219 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.630305 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545" exitCode=0 Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.630372 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.646101 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.670113 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\
\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.682271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.682317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.682336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.682358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.682376 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.695426 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.710658 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.726471 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.741854 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.758436 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.775525 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.785103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.785201 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.785227 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.785254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.785277 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.794678 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.810354 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.822913 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.839428 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.854861 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.870141 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.888189 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.888244 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.888257 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.888276 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.888292 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.889755 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:15Z 
is after 2025-08-24T17:21:41Z"
Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.991536 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.991591 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.991611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.991638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:15 crc kubenswrapper[4982]: I1205 19:14:15.991658 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:15Z","lastTransitionTime":"2025-12-05T19:14:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.094373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.094441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.094459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.094484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.094503 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.198072 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.198126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.198138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.198186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.198203 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.300831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.300907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.300931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.300957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.300978 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.389516 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.389901 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.390022 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:16 crc kubenswrapper[4982]: E1205 19:14:16.390013 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 19:14:16 crc kubenswrapper[4982]: E1205 19:14:16.390090 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:16 crc kubenswrapper[4982]: E1205 19:14:16.390280 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.408063 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.408095 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.408104 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.408126 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.408138 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.511201 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.511238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.511248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.511263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.511274 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.614319 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.614400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.614424 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.614459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.614487 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.646801 4982 generic.go:334] "Generic (PLEG): container finished" podID="c16040d8-4ec1-41d8-8462-03c5d07aa6b9" containerID="dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787" exitCode=0 Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.646876 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerDied","Data":"dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787"} Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.701279 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z 
is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.717530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.717576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.717586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.717608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.717619 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.727600 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731c
a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.741808 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.755256 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.782720 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.794942 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.810837 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.820124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.820175 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:16 crc 
kubenswrapper[4982]: I1205 19:14:16.820186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.820203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.820212 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.826598 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.839086 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.849033 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.857772 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.867549 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.878299 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.891622 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.904425 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:16Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.922552 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.922835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.922849 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.922868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:16 crc kubenswrapper[4982]: I1205 19:14:16.922882 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:16Z","lastTransitionTime":"2025-12-05T19:14:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.026183 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.026237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.026255 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.026282 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.026304 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.130370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.130478 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.130530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.130567 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.130591 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.233734 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.233796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.233817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.233844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.233868 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.336679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.336726 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.336736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.336751 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.336762 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.440065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.440135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.440196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.440225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.440250 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.543848 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.543903 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.543918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.543934 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.543946 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.646668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.646715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.646735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.646760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.646779 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.657627 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" event={"ID":"c16040d8-4ec1-41d8-8462-03c5d07aa6b9","Type":"ContainerStarted","Data":"2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.665114 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.665455 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.665529 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.678849 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.698517 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.701889 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.702071 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\
\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.707136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.707192 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.707204 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.707220 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.707231 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.724996 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.728060 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.733338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.733372 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.733382 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.733406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.733417 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.741477 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.752738 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.757393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.757455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.757468 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.757489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.757504 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.759233 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-c
erts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.778305 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.781622 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.786194 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.786237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.786246 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.786265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.786277 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.798931 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.801737 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.806159 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.806188 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.806199 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.806214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.806225 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.817562 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.820063 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: E1205 19:14:17.820434 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.822477 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.822524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.822539 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.822559 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.822571 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.843988 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf
9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.864739 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\
"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e
5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.877287 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.889491 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.903521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.914407 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.926101 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.926164 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.926181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.926200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:17 crc 
kubenswrapper[4982]: I1205 19:14:17.926213 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:17Z","lastTransitionTime":"2025-12-05T19:14:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.931944 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\
\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.944471 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.959715 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.978560 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:17 crc kubenswrapper[4982]: I1205 19:14:17.994920 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:17Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.025898 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.029305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.029356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.029376 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.029401 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.029422 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.072240 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.099424 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.112790 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.127408 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.131674 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.131713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.131725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.131743 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.131756 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.140530 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.156290 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.171112 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.185733 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.198629 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.214294 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:18Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.234631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.234682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.234694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.234715 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.234729 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.338627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.338716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.338739 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.338770 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.338791 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.389528 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.389585 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.389689 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:18 crc kubenswrapper[4982]: E1205 19:14:18.389790 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:18 crc kubenswrapper[4982]: E1205 19:14:18.389954 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:18 crc kubenswrapper[4982]: E1205 19:14:18.390120 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.442303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.442387 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.442401 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.442420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.442431 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.545982 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.546042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.546054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.546078 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.546091 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.654908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.654957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.654967 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.654985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.654995 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.668606 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.757792 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.757826 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.757854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.757870 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.757881 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.860315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.860363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.860375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.860394 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.860406 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.963784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.963856 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.963880 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.963909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:18 crc kubenswrapper[4982]: I1205 19:14:18.963931 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:18Z","lastTransitionTime":"2025-12-05T19:14:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.067060 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.067492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.067533 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.067558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.067575 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.170234 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.170270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.170282 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.170299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.170309 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.272865 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.272922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.272939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.272960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.272974 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.374993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.375025 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.375033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.375046 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.375055 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.396398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.477611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.477660 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.477674 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.477690 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.477703 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.580604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.580656 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.580672 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.580693 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.580706 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.674358 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/0.log" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.679197 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177" exitCode=1 Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.679271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.680439 4982 scope.go:117] "RemoveContainer" containerID="4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.684231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.684274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.684289 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.684313 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.684330 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.718136 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.755250 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34d
db0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.779611 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.786868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.786931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.786951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.786975 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.786996 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.801335 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.819189 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.834771 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.851470 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.867629 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.884180 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.889379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.889425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.889439 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.889458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.889473 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.898558 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.908302 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.920485 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.935602 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.949309 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.962265 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:19Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.992310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.992347 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.992358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.992374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:19 crc kubenswrapper[4982]: I1205 19:14:19.992385 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:19Z","lastTransitionTime":"2025-12-05T19:14:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.094777 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.094817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.094831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.094849 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.094862 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.144220 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.144401 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:14:36.144371109 +0000 UTC m=+55.026257104 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.197173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.197215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.197226 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.197251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.197262 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.245386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.245474 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.245513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.245539 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245571 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245600 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245612 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245624 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245646 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245651 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245669 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:36.24565183 +0000 UTC m=+55.127537825 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245673 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245686 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245694 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:36.245678151 +0000 UTC m=+55.127564156 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245711 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:36.245703642 +0000 UTC m=+55.127589647 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.245727 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:36.245717642 +0000 UTC m=+55.127603647 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.299631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.299668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.299677 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.299692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.299702 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.389630 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.389743 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.390043 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.390091 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.390125 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:20 crc kubenswrapper[4982]: E1205 19:14:20.390182 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.402020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.402065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.402079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.402099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.402111 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.505050 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.505103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.505118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.505139 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.505188 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.607812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.607868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.607885 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.607909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.607925 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.693042 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/0.log" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.696489 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.696965 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.710809 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.710843 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.710854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.710873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.710885 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.714358 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.733687 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.752035 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.775895 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.797899 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.813590 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.813639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.813648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.813663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.813673 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.832170 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.853700 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.869700 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.901242 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c
64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.913350 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.916336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.916398 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.916413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.916434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.916449 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:20Z","lastTransitionTime":"2025-12-05T19:14:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.931469 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.944015 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.954539 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.967858 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:20 crc kubenswrapper[4982]: I1205 19:14:20.991057 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:20Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.019002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.019045 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.019079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.019096 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.019111 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.121775 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.121823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.121839 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.121859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.121875 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.225127 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.225218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.225236 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.225258 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.225278 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.328614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.328681 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.328698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.328722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.328740 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.413229 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.431543 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 
19:14:21.431584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.431598 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.431614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.431625 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.437378 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.457145 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.493950 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c
64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.516192 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.534489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.534566 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.534592 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.534630 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.534667 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.538442 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.559814 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.574490 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.597888 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.631037 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.637357 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.637612 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.637771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.637923 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.638071 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.648559 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.665537 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.680985 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.702081 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.703427 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/1.log" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.704448 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/0.log" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.708077 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393" exitCode=1 Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.708180 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.708230 4982 scope.go:117] "RemoveContainer" containerID="4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.709326 4982 scope.go:117] "RemoveContainer" containerID="531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393" Dec 05 19:14:21 crc kubenswrapper[4982]: E1205 19:14:21.709644 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.723831 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.740810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.740850 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.740859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.740873 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.740883 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.744851 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.764843 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.785770 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.802303 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.817099 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.838736 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.843835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.843886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.843899 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.843918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.843930 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.856744 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.871390 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.894791 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} 
name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.916824 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.932343 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.946413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.946450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.946466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.946484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.946498 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:21Z","lastTransitionTime":"2025-12-05T19:14:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.950124 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.965100 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.977652 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:21 crc kubenswrapper[4982]: I1205 19:14:21.994315 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:21Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.049281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.049334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc 
kubenswrapper[4982]: I1205 19:14:22.049349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.049365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.049375 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.093530 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.111808 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cb
fa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.123061 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.132851 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.145518 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.152013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.152054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.152065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.152083 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.152094 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.157117 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc 
kubenswrapper[4982]: I1205 19:14:22.173608 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.188713 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.203974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.219007 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.230917 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.242310 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.254567 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.254608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.254620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.254638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.254649 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.256421 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj"] Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.256876 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.258683 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.258735 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.259200 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":
\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.276894 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.292482 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.335814 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} 
name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/opens
hift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.356767 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.356801 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.356812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.356829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.356842 4982 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.362997 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"l
astState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.364293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.364364 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.364397 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.364433 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtbsz\" (UniqueName: \"kubernetes.io/projected/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-kube-api-access-xtbsz\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.379259 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.389642 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.389701 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.389642 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:22 crc kubenswrapper[4982]: E1205 19:14:22.389773 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 19:14:22 crc kubenswrapper[4982]: E1205 19:14:22.389858 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:22 crc kubenswrapper[4982]: E1205 19:14:22.389940 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.391874 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.408505 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4df192a3fc2b795a463fb3099a8f029c4fe5a708af3bc0c6002bde4a7e8ed177\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:19Z\\\",\\\"message\\\":\\\" 6291 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:19.195404 6291 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.195995 6291 reflector.go:311] Stopping reflector *v1.AdminPolicyBasedExternalRoute (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/adminpolicybasedroute/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196116 6291 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196192 6291 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:19.196267 6291 factory.go:656] Stopping watch factory\\\\nI1205 19:14:19.196469 6291 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196888 6291 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:19.196988 6291 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service 
k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8d
ea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.422541 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.435705 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.454071 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.459317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.459365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.459381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.459403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.459418 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.465798 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.465853 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtbsz\" (UniqueName: \"kubernetes.io/projected/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-kube-api-access-xtbsz\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.465887 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.465932 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.466620 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-env-overrides\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.466740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.469572 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.473981 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.482413 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtbsz\" (UniqueName: \"kubernetes.io/projected/ade8a138-629c-4ff7-8c39-c86ab2bb6a98-kube-api-access-xtbsz\") pod \"ovnkube-control-plane-749d76644c-j2bqj\" (UID: \"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.483403 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.496318 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.507245 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.518358 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.528858 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.540142 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.551610 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561246 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561633 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561660 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.561695 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.576209 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" Dec 05 19:14:22 crc kubenswrapper[4982]: W1205 19:14:22.597441 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podade8a138_629c_4ff7_8c39_c86ab2bb6a98.slice/crio-8b9f72c8753c210cd8437bb4c30d8777793940c4f83bac0469b1d348c931bf3d WatchSource:0}: Error finding container 8b9f72c8753c210cd8437bb4c30d8777793940c4f83bac0469b1d348c931bf3d: Status 404 returned error can't find the container with id 8b9f72c8753c210cd8437bb4c30d8777793940c4f83bac0469b1d348c931bf3d Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.664431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.664466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.664474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.664488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.664498 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.718114 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" event={"ID":"ade8a138-629c-4ff7-8c39-c86ab2bb6a98","Type":"ContainerStarted","Data":"8b9f72c8753c210cd8437bb4c30d8777793940c4f83bac0469b1d348c931bf3d"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.721364 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/1.log" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.726483 4982 scope.go:117] "RemoveContainer" containerID="531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393" Dec 05 19:14:22 crc kubenswrapper[4982]: E1205 19:14:22.726730 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.740532 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.755218 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.768047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.768115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.768135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.768196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.768221 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.772358 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.798311 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.814230 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.836612 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\
\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2
eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19
:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.852417 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"202
5-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.871173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.871221 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.871232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.871251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.871263 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.875531 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744
a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.892303 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.908393 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.924679 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.943052 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.957360 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.969748 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.974310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.974374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.974388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.974405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.974420 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:22Z","lastTransitionTime":"2025-12-05T19:14:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:22 crc kubenswrapper[4982]: I1205 19:14:22.982465 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:22Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.011608 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:23Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.077666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.077722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.077739 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.077765 4982 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.077781 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.181919 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.182018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.182044 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.182079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.182103 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.284629 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.284691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.284706 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.284727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.284751 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.387238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.387299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.387316 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.387341 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.387359 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.819183 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.819209 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.819218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.819231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.819239 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.921931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.922009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.922035 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.922075 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:23 crc kubenswrapper[4982]: I1205 19:14:23.922097 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:23Z","lastTransitionTime":"2025-12-05T19:14:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.025663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.025723 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.025738 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.025765 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.025782 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.128371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.128440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.128459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.128481 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.128495 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.139648 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-6r5ns"] Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.140616 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.140726 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.157290 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.179736 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\
\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee307
6d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.195611 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.213552 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.227785 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.231661 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.231721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.231736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.231759 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.231773 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.240601 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc 
kubenswrapper[4982]: I1205 19:14:24.257474 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.272632 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.284575 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.284626 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zqd8x\" (UniqueName: \"kubernetes.io/projected/4d68836a-462d-4364-bc12-b530a7cb0727-kube-api-access-zqd8x\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.285861 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.300762 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.316949 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.331689 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.335121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.335180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.335198 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.335222 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.335234 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.351257 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\
":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.367544 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.380566 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.385942 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.386008 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zqd8x\" (UniqueName: \"kubernetes.io/projected/4d68836a-462d-4364-bc12-b530a7cb0727-kube-api-access-zqd8x\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns"
Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.386214 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.386313 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:24.886287805 +0000 UTC m=+43.768173800 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.390083 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.390096 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.390097 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.390328 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.390455 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.390536 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.391721 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.408800 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zqd8x\" 
(UniqueName: \"kubernetes.io/projected/4d68836a-462d-4364-bc12-b530a7cb0727-kube-api-access-zqd8x\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.410326 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},
{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158ae
b7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.437579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.437613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.437624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.437644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.437656 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.540783 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.540825 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.540834 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.540848 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.540860 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.643998 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.644443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.644512 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.644929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.645011 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.747840 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.747898 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.747910 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.747929 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.747942 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.826439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" event={"ID":"ade8a138-629c-4ff7-8c39-c86ab2bb6a98","Type":"ContainerStarted","Data":"fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.826512 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" event={"ID":"ade8a138-629c-4ff7-8c39-c86ab2bb6a98","Type":"ContainerStarted","Data":"2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9"}
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.851531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.851578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.851593 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.851613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.851626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.853038 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.867182 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.878017 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.893204 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.893351 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:24 crc kubenswrapper[4982]: E1205 19:14:24.893400 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:25.893387541 +0000 UTC m=+44.775273536 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.893935 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/
var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521
\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.908908 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.934224 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.949527 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.954900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.954947 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.954963 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.954997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.955017 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:24Z","lastTransitionTime":"2025-12-05T19:14:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.964011 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.979056 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:24 crc kubenswrapper[4982]: I1205 19:14:24.992720 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:24Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.007771 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.022870 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.042507 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057140 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057780 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057840 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.057901 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.072680 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.090281 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.101698 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:25Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.162009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.162074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.162091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.162120 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.162137 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.265859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.266352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.266375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.266399 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.266420 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.369814 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.369880 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.369907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.369934 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.369955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.472987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.473042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.473061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.473084 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.473101 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.576104 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.576286 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.576308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.576332 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.576351 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.679327 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.679409 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.679442 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.679510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.679532 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.783385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.783449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.783463 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.783486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.783501 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.887392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.887473 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.887507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.887545 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.887575 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.904394 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:25 crc kubenswrapper[4982]: E1205 19:14:25.904649 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:25 crc kubenswrapper[4982]: E1205 19:14:25.904757 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:27.904733848 +0000 UTC m=+46.786619853 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.991376 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.991457 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.991481 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.991509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:25 crc kubenswrapper[4982]: I1205 19:14:25.991527 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:25Z","lastTransitionTime":"2025-12-05T19:14:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.095648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.095708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.095762 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.095799 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.095823 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.198175 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.198242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.198255 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.198271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.198282 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.301977 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.302022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.302038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.302057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.302070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.389368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.389423 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.389483 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.389525 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:26 crc kubenswrapper[4982]: E1205 19:14:26.389695 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:26 crc kubenswrapper[4982]: E1205 19:14:26.389885 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:26 crc kubenswrapper[4982]: E1205 19:14:26.390089 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:26 crc kubenswrapper[4982]: E1205 19:14:26.390258 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.404348 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.404510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.404624 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.404731 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.404838 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.507962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.508524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.508680 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.508821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.508955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.611768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.611836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.611857 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.611883 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.611901 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.715419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.715482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.715498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.715524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.715544 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.819123 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.819221 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.819240 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.819266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.819284 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.921957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.922044 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.922070 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.922107 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:26 crc kubenswrapper[4982]: I1205 19:14:26.922133 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:26Z","lastTransitionTime":"2025-12-05T19:14:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.025808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.025882 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.025911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.025942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.025966 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.129068 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.129143 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.129190 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.129214 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.129231 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.231702 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.231760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.231777 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.231803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.231820 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.334052 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.334433 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.334537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.336409 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.336489 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.439435 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.439524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.439547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.439581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.439603 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.543069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.543129 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.543176 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.543201 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.543218 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.646529 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.646578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.646595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.646618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.646636 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.749937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.749982 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.749998 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.750020 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.750038 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.853100 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.853266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.853285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.853310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.853331 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.928951 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:27 crc kubenswrapper[4982]: E1205 19:14:27.929184 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:27 crc kubenswrapper[4982]: E1205 19:14:27.929302 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:31.929273082 +0000 UTC m=+50.811159107 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.956345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.956410 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.956435 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.956464 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:27 crc kubenswrapper[4982]: I1205 19:14:27.956491 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:27Z","lastTransitionTime":"2025-12-05T19:14:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.059590 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.059651 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.059675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.059703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.059726 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.158271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.158351 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.158375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.158973 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.159001 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.180949 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:28Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.186659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.186713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.186725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.186746 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.186758 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.203997 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:28Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.208701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.208737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.208808 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.208830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.208842 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.227301 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:28Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.232264 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.232296 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.232308 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.232326 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.232340 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.247990 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:28Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.253399 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.253448 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.253461 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.253480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.253496 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.270829 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:28Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:28Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.271077 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.273302 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.273365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.273385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.273410 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.273428 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.377295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.377352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.377374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.377401 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.377423 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.389678 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.389758 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.389691 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.389833 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.389869 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.390064 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.390252 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:28 crc kubenswrapper[4982]: E1205 19:14:28.390304 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.480731 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.480823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.480842 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.480869 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.480888 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.584238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.584310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.584334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.584363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.584389 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.687521 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.687567 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.687578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.687597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.687609 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.790175 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.790217 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.790228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.790266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.790277 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.893514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.893577 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.893595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.893620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.893638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.998265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.998337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.998354 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.998380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:28 crc kubenswrapper[4982]: I1205 19:14:28.998399 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:28Z","lastTransitionTime":"2025-12-05T19:14:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.101697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.101752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.101773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.101796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.101810 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.205712 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.205769 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.205785 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.205804 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.205820 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.309375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.309476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.309496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.309886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.310192 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.413542 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.413611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.413626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.413650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.413664 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.516866 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.516935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.516954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.516981 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.516999 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.620400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.620458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.620475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.620500 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.620517 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.723324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.723400 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.723425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.723456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.723481 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.826746 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.826834 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.826846 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.826865 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.826878 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.930026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.930089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.930099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.930118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:29 crc kubenswrapper[4982]: I1205 19:14:29.930129 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:29Z","lastTransitionTime":"2025-12-05T19:14:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.033625 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.033687 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.033704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.033737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.033760 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.137547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.137639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.137663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.137871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.137891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.240654 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.240737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.240771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.240802 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.240826 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.344538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.344588 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.344604 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.344627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.344641 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.389518 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.389569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.389518 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.389708 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:30 crc kubenswrapper[4982]: E1205 19:14:30.389874 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:30 crc kubenswrapper[4982]: E1205 19:14:30.390065 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:30 crc kubenswrapper[4982]: E1205 19:14:30.390143 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:30 crc kubenswrapper[4982]: E1205 19:14:30.390312 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.447030 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.447128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.447182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.447213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.447232 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.551204 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.551275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.551299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.551331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.551355 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.654296 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.654349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.654367 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.654392 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.654409 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.758767 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.758842 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.758868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.758900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.758925 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.861343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.861382 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.861390 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.861405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.861414 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.964874 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.964935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.964958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.964984 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:30 crc kubenswrapper[4982]: I1205 19:14:30.965002 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:30Z","lastTransitionTime":"2025-12-05T19:14:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.068652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.068912 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.068947 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.069028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.069064 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.171719 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.171773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.171790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.171813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.171830 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.274795 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.274859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.274881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.274907 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.274927 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.378650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.378728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.378752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.378782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.378805 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.416273 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.434568 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.452012 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.466450 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.481915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.482023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.482049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.482079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.482100 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.496535 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c
64862bcabfa926709b0be393\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.518227 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt
\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.534512 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.560108 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e1
49f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.582120 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.585000 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.585047 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.585058 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.585077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.585089 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.604330 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.623583 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.641677 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.660725 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.679888 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.688058 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.688102 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.688116 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.688135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.688170 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.697863 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.711721 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.724725 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:31Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.790744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 
19:14:31.790814 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.790830 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.790857 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.790874 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.893738 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.893823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.893850 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.893885 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.893922 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.971687 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:31 crc kubenswrapper[4982]: E1205 19:14:31.971986 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:31 crc kubenswrapper[4982]: E1205 19:14:31.972108 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:39.972081086 +0000 UTC m=+58.853967121 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.996702 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.996786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.996803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.996829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:31 crc kubenswrapper[4982]: I1205 19:14:31.996847 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:31Z","lastTransitionTime":"2025-12-05T19:14:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.099655 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.099713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.099735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.099768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.099791 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.203063 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.203180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.203202 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.203228 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.203247 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.305784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.305846 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.305869 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.305901 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.305925 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.390247 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.390297 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.390311 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.390339 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:32 crc kubenswrapper[4982]: E1205 19:14:32.390429 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:32 crc kubenswrapper[4982]: E1205 19:14:32.390580 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:32 crc kubenswrapper[4982]: E1205 19:14:32.390783 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:32 crc kubenswrapper[4982]: E1205 19:14:32.390997 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.408596 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.408675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.408703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.408741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.408765 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.512459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.512596 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.512618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.512645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.512664 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.615911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.615976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.615999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.616029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.616052 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.725581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.725652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.725673 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.725701 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.725727 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.828774 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.828887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.828911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.828942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.829020 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.932518 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.932623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.932716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.932748 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:32 crc kubenswrapper[4982]: I1205 19:14:32.932769 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:32Z","lastTransitionTime":"2025-12-05T19:14:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.035702 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.035773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.035796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.035825 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.035848 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.138626 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.138699 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.138714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.138741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.138764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.242272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.242330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.242350 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.242376 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.242396 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.345018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.345087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.345110 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.345136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.345197 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.391492 4982 scope.go:117] "RemoveContainer" containerID="531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.447546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.447591 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.447601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.447616 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.447638 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.550987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.551059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.551084 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.551114 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.551136 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.653595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.653645 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.653659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.653678 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.653690 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.756942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.756993 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.757004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.757023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.757035 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.859032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.859082 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.859091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.859112 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.859123 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.861746 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/1.log" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.865464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.866033 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.891669 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.909030 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.932573 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.946901 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 
19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.962119 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.962196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.962211 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.962231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.962240 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:33Z","lastTransitionTime":"2025-12-05T19:14:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.968847 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b9009
2272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\
":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.981712 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:33 crc kubenswrapper[4982]: I1205 19:14:33.995919 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:33Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.008169 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.022504 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506c
e0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.035539 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.047390 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"syste
m-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.061946 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.065366 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.065402 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.065411 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.065431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.065444 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.078309 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.099106 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.116674 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.129359 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.156080 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.167749 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.167811 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.167827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.167849 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.167862 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.270627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.270696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.270712 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.270738 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.270754 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.378080 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.378219 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.378240 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.378255 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.378266 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.389446 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:34 crc kubenswrapper[4982]: E1205 19:14:34.389604 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.389984 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:34 crc kubenswrapper[4982]: E1205 19:14:34.390043 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.390089 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:34 crc kubenswrapper[4982]: E1205 19:14:34.390137 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.390196 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:34 crc kubenswrapper[4982]: E1205 19:14:34.390253 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.480871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.480964 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.480985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.481023 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.481052 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.586125 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.586171 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.586182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.586196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.586205 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.689510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.689885 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.690079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.690265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.690414 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.793875 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.793936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.793954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.793977 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.793995 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.872485 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/2.log" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.873716 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/1.log" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.878613 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" exitCode=1 Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.878690 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.878785 4982 scope.go:117] "RemoveContainer" containerID="531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.881349 4982 scope.go:117] "RemoveContainer" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" Dec 05 19:14:34 crc kubenswrapper[4982]: E1205 19:14:34.881924 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.882336 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.899310 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.899379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.899396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.899422 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.899440 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:34Z","lastTransitionTime":"2025-12-05T19:14:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.901235 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.903087 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.926073 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.944277 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.957031 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:34 crc kubenswrapper[4982]: I1205 19:14:34.989813 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:34Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.003625 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.003748 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.003766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.003791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.003808 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.007593 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.029527 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.046542 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 
19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.071550 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.087369 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.106633 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.106710 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.106736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.106772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.106811 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.107124 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.119863 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.136835 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.152551 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.168490 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.184838 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.197939 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.210365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.210423 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.210441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.210466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.210485 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.214879 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.229083 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.250085 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.267913 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 
19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.297550 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313036 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313775 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313831 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313852 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.313895 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.329015 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.340515 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.354250 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.368574 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.386218 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.399047 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.415823 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.416093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.416122 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.416131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.416159 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.416169 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.433732 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.449117 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.460878 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.476811 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.499476 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75be
f568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://531060f8e74eb0b8c943927da21b26601f8be98c64862bcabfa926709b0be393\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:21Z\\\",\\\"message\\\":\\\"onfiguring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-multus/multus-admission-controller]} name:Service_openshift-multus/multus-admission-controller_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.119:443: 10.217.5.119:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {d4efc4a8-c514-4a6b-901c-2953978b50d3}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF1205 19:14:20.468493 6418 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: failed to add event handler: handler {0x1e60340 0x1e60020 0x1e5ffc0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.netwo\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] 
Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\
"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.519605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.519650 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.519661 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.519678 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.519692 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.622692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.622725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.622734 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.622765 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.622775 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.725384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.725451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.725474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.725502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.725525 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.828558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.828648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.828668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.828693 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.828712 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.884968 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/2.log" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.891355 4982 scope.go:117] "RemoveContainer" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" Dec 05 19:14:35 crc kubenswrapper[4982]: E1205 19:14:35.891657 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.909515 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.932796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.932875 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.932900 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.932931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.932955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:35Z","lastTransitionTime":"2025-12-05T19:14:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.936447 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd
791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.956920 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.976647 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:35 crc kubenswrapper[4982]: I1205 19:14:35.996635 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:35Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.021396 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.037142 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.037254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.037272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.037301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.037321 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.043371 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.065581 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.078551 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.090079 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.109139 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75be
f568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.120754 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.133702 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.139498 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.139549 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.139565 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.139586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" 
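Every "Failed to update status for pod" entry above shares one root cause reported by the TLS layer: the serving certificate for the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743 stopped being valid on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-05, so each status patch fails certificate verification. The network-node-identity-vrzqb pod status logged earlier shows the webhook container mounting its certificate at /etc/webhook-cert/. The following is a minimal diagnostic sketch, not part of the log and not the cluster's own code, of the same crypto/x509 validity comparison; the tls.crt filename (and the exact path) are assumptions for illustration:

```go
// Sketch: compare a PEM-encoded certificate's validity window against the
// current time, mirroring the "x509: certificate has expired or is not yet
// valid" failures in the log above. Path and filename are assumptions.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical location, based on the /etc/webhook-cert/ mount in the
	// network-node-identity pod status; adjust to the real cert path.
	pemBytes, err := os.ReadFile("/etc/webhook-cert/tls.crt")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(pemBytes)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now().UTC()
	fmt.Printf("NotBefore: %s\nNotAfter:  %s\nNow:       %s\n",
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339),
		now.Format(time.RFC3339))
	switch {
	case now.After(cert.NotAfter):
		// Matches the log: current time is after the certificate's NotAfter.
		fmt.Println("certificate has expired")
	case now.Before(cert.NotBefore):
		fmt.Println("certificate is not yet valid")
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Until that certificate is rotated, and a CNI configuration appears in /etc/kubernetes/cni/net.d/, the kubelet keeps reporting NetworkReady=false and NodeNotReady, as the surrounding entries show.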
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.139601 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.152707 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeove
rride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.166205 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.188912 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775cc
b93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.213279 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.227974 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:36Z is after 2025-08-24T17:21:41Z"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.233401 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.233538 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:15:08.233520811 +0000 UTC m=+87.115406806 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.242281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.242348 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.242363 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.242384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.242397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.334224 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.334300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.334371 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.334410 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334542 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334621 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334642 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334665 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334667 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334923 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334946 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334565 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.334735 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:15:08.33470091 +0000 UTC m=+87.216586905 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.335212 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:15:08.335140022 +0000 UTC m=+87.217026177 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.335270 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:15:08.335249295 +0000 UTC m=+87.217135530 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.335313 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:15:08.335296856 +0000 UTC m=+87.217183121 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.346735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.346832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.346859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.346966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.347130 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.389717 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.389817 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.389921 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.389958 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.389948 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns"
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.390145 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.390431 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727"
Dec 05 19:14:36 crc kubenswrapper[4982]: E1205 19:14:36.390968 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.450209 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.450527 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.450716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.450918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.451080 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.555386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.555890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.556136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.556427 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.556683 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.660459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.660549 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.660599 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.660628 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.660648 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.763506 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.763571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.763589 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.763613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.763631 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.866712 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.866776 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.866796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.866844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.866863 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.969725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.969783 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.969801 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.969823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:36 crc kubenswrapper[4982]: I1205 19:14:36.969841 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:36Z","lastTransitionTime":"2025-12-05T19:14:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.072288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.072338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.072349 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.072365 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.072374 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.175784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.175869 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.175888 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.175920 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.175940 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.280182 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.280250 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.280268 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.280293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.280309 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.383581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.383666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.383691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.383722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.383743 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.487026 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.487077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.487086 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.487103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.487119 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.590433 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.590520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.590540 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.590571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.590594 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.693561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.693613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.693632 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.693657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.693673 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.796786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.796895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.796917 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.796942 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.796959 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.899961 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.900242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.900260 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.900283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:37 crc kubenswrapper[4982]: I1205 19:14:37.900300 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:37Z","lastTransitionTime":"2025-12-05T19:14:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.003591 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.003678 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.003711 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.003742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.003764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.106803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.106881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.106906 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.106933 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.106966 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.209785 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.209853 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.209871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.209893 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.209914 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.313096 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.313217 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.313246 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.313272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.313294 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.390106 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.390206 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.390130 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.390366 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727"
Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.390453 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.390510 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.390563 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.390638 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.416724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.416793 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.416815 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.416845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.416868 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.496231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.496286 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.496303 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.496338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.496356 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.519037 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:38Z is after 
2025-08-24T17:21:41Z" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.524590 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.524641 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.524658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.524685 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.524704 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.554219 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:38Z is after 
2025-08-24T17:21:41Z" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.559609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.559670 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.559694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.559724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.559745 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.577077 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:38Z is after 
2025-08-24T17:21:41Z" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.582601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.582669 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.582682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.582709 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.582728 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.598215 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:38Z is after 
2025-08-24T17:21:41Z" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.604074 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.604137 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.604184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.604211 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.604237 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.621035 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:38Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:38Z is after 
2025-08-24T17:21:41Z" Dec 05 19:14:38 crc kubenswrapper[4982]: E1205 19:14:38.621450 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.623744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.623797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.623813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.623836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.623855 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.726709 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.726892 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.726921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.726953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.726977 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.829972 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.830019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.830032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.830061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.830076 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.932911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.932968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.932980 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.932999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:38 crc kubenswrapper[4982]: I1205 19:14:38.933012 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:38Z","lastTransitionTime":"2025-12-05T19:14:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.036254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.036331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.036368 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.036404 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.036427 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.139818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.139871 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.139887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.139911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.139928 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.243780 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.243861 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.243886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.243915 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.243939 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.347860 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.347913 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.347945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.347972 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.347987 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.451523 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.451646 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.451700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.451727 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.451745 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.555304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.555356 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.555367 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.555386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.555397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.658744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.658832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.658887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.658912 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.658933 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.762034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.762096 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.762113 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.762139 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.762188 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.865582 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.865633 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.865647 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.865664 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.865676 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.968339 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.968450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.968464 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.968486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.968501 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:39Z","lastTransitionTime":"2025-12-05T19:14:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:39 crc kubenswrapper[4982]: I1205 19:14:39.977175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:39 crc kubenswrapper[4982]: E1205 19:14:39.977443 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:39 crc kubenswrapper[4982]: E1205 19:14:39.977576 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:14:55.977544811 +0000 UTC m=+74.859430986 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.071571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.071644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.071659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.071682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.071709 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.175217 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.175287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.175302 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.175324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.175340 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.279779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.279823 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.279835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.279854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.279867 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.381976 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.382024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.382037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.382094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.382108 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.390287 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.390344 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.390362 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.390388 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:40 crc kubenswrapper[4982]: E1205 19:14:40.390439 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:40 crc kubenswrapper[4982]: E1205 19:14:40.390593 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:40 crc kubenswrapper[4982]: E1205 19:14:40.390760 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:40 crc kubenswrapper[4982]: E1205 19:14:40.390883 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.484318 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.484359 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.484370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.484388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.484402 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.587822 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.588008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.588039 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.588075 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.588136 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.691197 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.691256 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.691273 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.691301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.691319 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.794205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.794261 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.794283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.794313 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.794336 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.897707 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.897771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.897792 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.897818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:40 crc kubenswrapper[4982]: I1205 19:14:40.897835 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:40Z","lastTransitionTime":"2025-12-05T19:14:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.000586 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.000643 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.000660 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.000683 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.000700 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.103965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.104067 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.104090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.104121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.104141 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.207285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.207335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.207375 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.207399 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.207415 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.310405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.310460 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.310470 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.310492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.310507 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.412892 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447
235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\
\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"f
inishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.413337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.413374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.413386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.413402 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.413416 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.429425 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.452176 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.465397 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.480189 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.497767 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.508577 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.515959 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.515996 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.516008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.516024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.516034 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.521338 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.533495 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.545223 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.556286 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.564202 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.575456 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.585546 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.596172 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.606882 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618351 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618571 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618592 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618603 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618617 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.618626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.639636 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:41Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.720925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.720960 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.720970 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.720985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.720996 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.823951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.824014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.824027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.824051 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.824070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.926962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.927038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.927054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.927080 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:41 crc kubenswrapper[4982]: I1205 19:14:41.927101 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:41Z","lastTransitionTime":"2025-12-05T19:14:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.030608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.030953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.030971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.030997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.031014 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.159652 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.159708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.159717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.159732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.159741 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.262092 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.262131 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.262140 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.262171 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.262181 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.365419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.365489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.365508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.365541 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.365562 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.389749 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:42 crc kubenswrapper[4982]: E1205 19:14:42.389936 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.390495 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.390524 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:42 crc kubenswrapper[4982]: E1205 19:14:42.390698 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.390525 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:42 crc kubenswrapper[4982]: E1205 19:14:42.390917 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:42 crc kubenswrapper[4982]: E1205 19:14:42.391014 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.469447 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.469520 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.469538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.469563 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.469582 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.572640 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.572691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.572700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.572723 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.572736 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.675160 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.675213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.675225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.675243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.675256 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.778875 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.778923 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.778936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.778953 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.778964 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.882470 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.882514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.882525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.882560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.882573 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.987033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.987191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.987225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.988393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:42 crc kubenswrapper[4982]: I1205 19:14:42.988494 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:42Z","lastTransitionTime":"2025-12-05T19:14:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.091853 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.091905 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.091920 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.091940 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.091954 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.194700 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.194741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.194751 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.194764 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.194773 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.297983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.298049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.298071 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.298099 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.298118 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.405291 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.405335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.405348 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.405364 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.405375 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.507324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.507362 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.507373 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.507386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.507396 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.612691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.612760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.612784 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.612815 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:43 crc kubenswrapper[4982]: I1205 19:14:43.612842 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:43Z","lastTransitionTime":"2025-12-05T19:14:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[the same five-entry status sequence (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, then setters.go:603 "Node became not ready" with the identical KubeletNotReady / missing-CNI-configuration message) repeats at roughly 100 ms intervals at 19:14:43.716, .820, .922 and at 19:14:44.026, .128, .232, .334]
Dec 05 19:14:44 crc kubenswrapper[4982]: I1205 19:14:44.389835 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:44 crc kubenswrapper[4982]: I1205 19:14:44.389909 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:44 crc kubenswrapper[4982]: I1205 19:14:44.389863 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:44 crc kubenswrapper[4982]: I1205 19:14:44.389855 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:44 crc kubenswrapper[4982]: E1205 19:14:44.390014 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:44 crc kubenswrapper[4982]: E1205 19:14:44.390132 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:44 crc kubenswrapper[4982]: E1205 19:14:44.390231 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:44 crc kubenswrapper[4982]: E1205 19:14:44.390287 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[status sequence repeats at 19:14:44.437]
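The "Node became not ready" entries above are the kubelet publishing a Ready=False condition on the node object, so the same condition is visible from any API client. The following is a minimal, illustrative client-go sketch (Go, the kubelet's own language), not part of the captured log; it assumes a reachable kubeconfig at the default path and reuses the node name "crc" from the log.

package main

import (
	"context"
	"fmt"
	"time"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: a kubeconfig at the default location (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	for {
		// "crc" is the node name reported in the log above.
		node, err := cs.CoreV1().Nodes().Get(context.TODO(), "crc", metav1.GetOptions{})
		if err != nil {
			panic(err)
		}
		for _, c := range node.Status.Conditions {
			if c.Type == corev1.NodeReady {
				// Mirrors what setters.go:603 is writing: status=False,
				// reason=KubeletNotReady while the CNI config is missing.
				fmt.Printf("%s Ready=%s reason=%s message=%q\n",
					time.Now().Format(time.RFC3339), c.Status, c.Reason, c.Message)
			}
		}
		time.Sleep(5 * time.Second)
	}
}

Run against this cluster during the window captured here, it would keep printing Ready=False reason=KubeletNotReady until a CNI configuration file appears under /etc/kubernetes/cni/net.d/.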
[status sequence repeats at roughly 100 ms intervals from 19:14:44.540 through 19:14:46.293 (19:14:44.540, .643, .747, .850, .953; 19:14:45.056, .159, .262, .365, .468, .571, .674, .777, .880, .983; 19:14:46.085, .190, .293), each time with the identical KubeletNotReady / missing-CNI-configuration message]
Dec 05 19:14:46 crc kubenswrapper[4982]: I1205 19:14:46.389959 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:46 crc kubenswrapper[4982]: I1205 19:14:46.390028 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:46 crc kubenswrapper[4982]: I1205 19:14:46.390103 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:46 crc kubenswrapper[4982]: E1205 19:14:46.390192 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:46 crc kubenswrapper[4982]: I1205 19:14:46.390222 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:46 crc kubenswrapper[4982]: E1205 19:14:46.390406 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:46 crc kubenswrapper[4982]: E1205 19:14:46.390514 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:46 crc kubenswrapper[4982]: E1205 19:14:46.390564 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[status sequence repeats at 19:14:46.397 and 19:14:46.499]
[status sequence repeats at roughly 100 ms intervals from 19:14:46.602 through 19:14:47.320 (19:14:46.602, .705, .807, .910; 19:14:47.013, .116, .218, .320)]
Dec 05 19:14:47 crc kubenswrapper[4982]: I1205 19:14:47.390218 4982 scope.go:117] "RemoveContainer" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" Dec 05 19:14:47 crc kubenswrapper[4982]: E1205 19:14:47.390490 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c"
[status sequence repeats at 19:14:47.422 and 19:14:47.525]
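The CrashLoopBackOff above is the likely root cause of everything else in this window: in OVN-Kubernetes deployments the ovnkube-node pod is typically what writes the CNI configuration into /etc/kubernetes/cni/net.d/, so while ovnkube-controller keeps crashing the kubelet keeps reporting NetworkReady=false. A sketch, under the same kubeconfig assumption, that retrieves the crashed container's previous log for diagnosis:

package main

import (
	"context"
	"io"
	"os"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// Pod, namespace, and container names taken from the CrashLoopBackOff entry above.
	req := cs.CoreV1().Pods("openshift-ovn-kubernetes").GetLogs("ovnkube-node-xlt6h",
		&corev1.PodLogOptions{Container: "ovnkube-controller", Previous: true})
	rc, err := req.Stream(context.TODO())
	if err != nil {
		panic(err)
	}
	defer rc.Close()
	// Dump the previous (crashed) container instance's output.
	if _, err := io.Copy(os.Stdout, rc); err != nil {
		panic(err)
	}
}

Previous: true is what distinguishes this from a plain log fetch; it returns the output of the container instance that exited, which is where the crash reason will be.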
[status sequence repeats at roughly 100 ms intervals from 19:14:47.629 through 19:14:48.349 (19:14:47.629, .731, .834, .937; 19:14:48.041, .143, .246, .349)]
Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.389804 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.389806 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.389941 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.390091 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.390926 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.391205 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.391609 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.392371 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[status sequence repeats at 19:14:48.453]
Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.557425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.557834 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.557988 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.558134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.558317 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.660954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.661519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.661596 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.661695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.661780 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.718737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.718828 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.718848 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.718872 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.718891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.732687 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:48Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.738298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.738336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.738345 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.738359 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.738368 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.750800 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:48Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.755698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.755773 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.755799 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.755835 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.755861 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.770822 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:48Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.775810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.775861 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.775878 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.775901 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.775917 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.795398 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:48Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.800299 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.800353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.800371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.800396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.800414 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.817062 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:48Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:48Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:48 crc kubenswrapper[4982]: E1205 19:14:48.817216 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.819480 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.819523 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.819539 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.819559 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.819573 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.922876 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.922935 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.922950 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.922986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:48 crc kubenswrapper[4982]: I1205 19:14:48.923000 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:48Z","lastTransitionTime":"2025-12-05T19:14:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.025088 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.025173 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.025191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.025215 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.025239 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.127692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.127736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.127748 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.127762 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.127771 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.230361 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.230405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.230416 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.230432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.230444 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.333581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.333696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.333720 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.333744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.333761 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.436698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.436743 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.436753 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.436770 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.436782 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.539888 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.539941 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.539951 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.539968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.539981 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.643181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.643240 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.643254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.643278 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.643293 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.746007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.746059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.746075 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.746096 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.746109 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.853403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.853446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.853459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.853473 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.853486 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.956769 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.956824 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.956837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.956856 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:49 crc kubenswrapper[4982]: I1205 19:14:49.956869 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:49Z","lastTransitionTime":"2025-12-05T19:14:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.058450 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.058484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.058492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.058506 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.058518 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.161091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.161144 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.161183 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.161203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.161213 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.263797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.263852 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.263868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.263890 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.263906 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.368649 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.368708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.368722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.368746 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.368764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.389658 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.389726 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.389748 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.389770 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:50 crc kubenswrapper[4982]: E1205 19:14:50.389841 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:50 crc kubenswrapper[4982]: E1205 19:14:50.389994 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:50 crc kubenswrapper[4982]: E1205 19:14:50.390128 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:50 crc kubenswrapper[4982]: E1205 19:14:50.390245 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.472024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.472069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.472079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.472094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.472106 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.575044 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.575098 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.575113 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.575128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.575140 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.678453 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.678510 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.678527 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.678550 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.678564 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.781368 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.782289 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.782463 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.782610 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.782744 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.885474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.885534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.885551 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.885574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.885591 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.987748 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.988055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.988164 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.988271 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:50 crc kubenswrapper[4982]: I1205 19:14:50.988351 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:50Z","lastTransitionTime":"2025-12-05T19:14:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.092181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.092555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.092750 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.093090 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.093311 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.195758 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.195819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.195838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.195864 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.195884 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.298475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.298821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.298919 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.299004 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.299096 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.402270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.402319 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.402337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.402362 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.402381 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.403654 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.423757 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75be
f568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.451049 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc2
9971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.466973 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.485206 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.503334 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.505593 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.505720 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.505827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.505981 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.506104 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.516719 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.12
6.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.531083 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\
\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.543389 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.556429 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/open
shift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.569466 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod 
was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.584995 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\
"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.596875 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.608802 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.608971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.609064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.609197 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.609301 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.611950 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.636051 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.660273 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.675481 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.688778 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:51Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.712266 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.712314 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.712329 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.712350 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.712362 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.815495 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.815532 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.815543 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.815562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.815573 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.918858 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.918913 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.918923 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.918945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:51 crc kubenswrapper[4982]: I1205 19:14:51.918959 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:51Z","lastTransitionTime":"2025-12-05T19:14:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.022533 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.022584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.022595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.022618 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.022631 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.124537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.124816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.124965 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.125091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.125292 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.227946 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.228022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.228037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.228061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.228079 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.331509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.331577 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.331587 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.331607 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.331620 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.390130 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.390219 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.390179 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:52 crc kubenswrapper[4982]: E1205 19:14:52.390333 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.390167 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:52 crc kubenswrapper[4982]: E1205 19:14:52.390475 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:52 crc kubenswrapper[4982]: E1205 19:14:52.390555 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:52 crc kubenswrapper[4982]: E1205 19:14:52.390633 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.434507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.434547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.434558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.434575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.434592 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.537354 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.537417 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.537440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.537461 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.537484 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.640541 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.640602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.640615 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.640636 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.640651 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.743366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.743432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.743446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.743467 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.743485 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.846016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.846059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.846068 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.846083 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.846092 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.948114 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.948145 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.948176 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.948194 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:52 crc kubenswrapper[4982]: I1205 19:14:52.948208 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:52Z","lastTransitionTime":"2025-12-05T19:14:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.050668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.050707 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.050717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.050737 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.050748 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.154008 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.154033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.154041 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.154054 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.154063 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.256756 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.256810 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.256826 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.256847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.256865 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.359385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.359424 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.359434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.359449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.359461 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.461895 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.461937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.461949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.461968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.461980 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.565444 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.565484 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.565501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.565534 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.565547 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.669374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.669441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.669453 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.669478 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.669492 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.772386 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.772436 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.772445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.772462 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.772473 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.875783 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.875866 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.875881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.875902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.875926 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.977978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.978028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.978039 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.978058 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:53 crc kubenswrapper[4982]: I1205 19:14:53.978068 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:53Z","lastTransitionTime":"2025-12-05T19:14:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.081012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.081065 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.081075 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.081091 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.081102 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.184246 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.184300 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.184320 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.184347 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.184368 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.287638 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.287691 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.287703 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.287722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.287739 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.389270 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.389436 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:54 crc kubenswrapper[4982]: E1205 19:14:54.389761 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.390042 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.390115 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:54 crc kubenswrapper[4982]: E1205 19:14:54.390210 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:54 crc kubenswrapper[4982]: E1205 19:14:54.390574 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:54 crc kubenswrapper[4982]: E1205 19:14:54.390653 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.391568 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.391609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.391623 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.391641 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.391705 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.494760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.494843 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.494854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.494874 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.494891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.596990 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.597042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.597061 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.597082 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.597093 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.699449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.699501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.699514 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.699531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.699546 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.802379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.802470 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.802492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.802518 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.802541 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.905914 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.905987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.906010 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.906036 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:54 crc kubenswrapper[4982]: I1205 19:14:54.906056 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:54Z","lastTransitionTime":"2025-12-05T19:14:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.009212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.009276 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.009291 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.009312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.009325 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.112931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.112980 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.113000 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.113024 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.113041 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.215745 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.215827 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.215853 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.215886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.215910 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.318477 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.318558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.318581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.318611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.318629 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.421294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.421343 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.421355 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.421371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.421385 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.525384 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.525509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.525578 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.525610 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.525633 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.628986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.629050 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.629069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.629094 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.629111 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.732393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.732438 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.732458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.732481 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.732497 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.836204 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.836276 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.836295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.836320 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.836340 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.940016 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.940815 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.940874 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.940921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:55 crc kubenswrapper[4982]: I1205 19:14:55.940940 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:55Z","lastTransitionTime":"2025-12-05T19:14:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.044359 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.044442 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.044472 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.044504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.044528 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.046757 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.046907 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.046961 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:15:28.046945612 +0000 UTC m=+106.928831617 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.147456 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.147515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.147537 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.148088 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.148180 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.250983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.251038 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.251056 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.251083 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.251101 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.359606 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.359653 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.359666 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.359684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.359698 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.389306 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.389310 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.389465 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.389587 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.389328 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.389766 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.389960 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:56 crc kubenswrapper[4982]: E1205 19:14:56.391201 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.462940 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.463322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.463406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.463539 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.463626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.567232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.567288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.567301 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.567322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.567337 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.670478 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.670538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.670552 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.670579 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.670599 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.773625 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.773706 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.773735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.773766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.773798 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.876625 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.876686 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.876699 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.876724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.876738 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.964811 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/0.log" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.964892 4982 generic.go:334] "Generic (PLEG): container finished" podID="62e7c34f-d411-481e-a5bb-885e7cbd4326" containerID="d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016" exitCode=1 Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.964934 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerDied","Data":"d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.965539 4982 scope.go:117] "RemoveContainer" containerID="d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.980208 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.980269 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.980294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.980326 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.980349 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:56Z","lastTransitionTime":"2025-12-05T19:14:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:56 crc kubenswrapper[4982]: I1205 19:14:56.985513 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:56Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.022396 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75be
f568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.035554 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.051885 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.071352 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.082920 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.082978 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc 
kubenswrapper[4982]: I1205 19:14:57.082994 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.083015 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.083027 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.084884 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:1
4:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.103524 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.120717 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.136561 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.148042 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.161661 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.175416 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.185508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.185548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.185558 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.185574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.185585 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.192024 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.205736 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.221103 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a
6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.237931 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.257828 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.273725 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.288863 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.289135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.289237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.289317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.289554 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.393315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.393538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.393583 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.393628 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.393646 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.496530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.496585 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.496603 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.496628 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.496646 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.600042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.600118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.600138 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.600202 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.600223 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.703561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.703950 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.704191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.704385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.704589 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.807631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.807679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.807695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.807718 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.807734 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.911511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.911570 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.911581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.911603 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.911619 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:57Z","lastTransitionTime":"2025-12-05T19:14:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.971887 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/0.log" Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.971963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerStarted","Data":"2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb"} Dec 05 19:14:57 crc kubenswrapper[4982]: I1205 19:14:57.990183 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:57Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.006296 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.015880 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.015926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.015943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.015966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.015982 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.027813 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.046109 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.064774 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.082391 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.102946 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.123531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.123619 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.123766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.123790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.123806 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.124084 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.143607 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.162588 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.190550 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75be
f568e254225aef77296019ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.207886 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.223359 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.227430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.227458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.227468 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.227504 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.227516 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.238972 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc 
kubenswrapper[4982]: I1205 19:14:58.256865 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.273032 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.299815 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e
7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.320173 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.330263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.330325 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.330362 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.330379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.330391 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.389767 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.389882 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.389770 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:14:58 crc kubenswrapper[4982]: E1205 19:14:58.389944 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.389959 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:14:58 crc kubenswrapper[4982]: E1205 19:14:58.390057 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:14:58 crc kubenswrapper[4982]: E1205 19:14:58.390213 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:14:58 crc kubenswrapper[4982]: E1205 19:14:58.390304 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.433411 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.433472 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.433486 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.433508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.433522 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.537128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.537210 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.537224 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.537281 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.537299 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.640207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.640287 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.640312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.640344 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.640368 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.744417 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.744494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.744522 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.744556 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.744579 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.847733 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.847779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.847797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.847814 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.847826 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.951338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.951389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.951405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.951429 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.951450 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.975660 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.975716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.975733 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.975756 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:58 crc kubenswrapper[4982]: I1205 19:14:58.975773 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:58Z","lastTransitionTime":"2025-12-05T19:14:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:58 crc kubenswrapper[4982]: E1205 19:14:58.997466 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:58Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:58Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.003231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.003292 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.003311 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.003336 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.003354 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: E1205 19:14:59.027411 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:59Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.032668 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.032732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.032757 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.032818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.032843 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: E1205 19:14:59.053190 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:59Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.058244 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.058307 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.058331 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.058360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.058383 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: E1205 19:14:59.077611 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:59Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.083446 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.083488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.083499 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.083515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.083525 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: E1205 19:14:59.105042 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:14:59Z is after 2025-08-24T17:21:41Z" Dec 05 19:14:59 crc kubenswrapper[4982]: E1205 19:14:59.105452 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.107741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.107811 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.107829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.107883 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.107905 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.211009 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.211067 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.211079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.211095 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.211106 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.314662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.314711 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.314728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.314752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.314770 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.390984 4982 scope.go:117] "RemoveContainer" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.419852 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.420170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.420359 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.420609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.421113 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.525911 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.526001 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.526032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.526063 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.526085 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.630730 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.630800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.630817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.630840 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.630857 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.733927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.733983 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.733999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.734022 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.734039 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.837722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.837797 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.837820 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.837844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.837858 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.964335 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.964360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.964369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.964380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.964396 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:14:59Z","lastTransitionTime":"2025-12-05T19:14:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.984925 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/2.log" Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.987182 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:14:59 crc kubenswrapper[4982]: I1205 19:14:59.988324 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.009835 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.033085 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea
358a399276c3923ea0de9682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.050586 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.065762 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.066997 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.067028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.067037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.067057 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.067067 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.076734 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc 
kubenswrapper[4982]: I1205 19:15:00.092918 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"
cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.111136 4982 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.144865 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-0
5T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e
7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.159267 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.169937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.169974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.169985 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.170013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.170026 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.176192 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.190400 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.207348 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.225988 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.239564 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.249388 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.261872 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.272494 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.272732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.272904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.272924 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.272936 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.277573 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.292916 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:00Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.375200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.375552 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.375639 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.375734 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.375811 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.389807 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:00 crc kubenswrapper[4982]: E1205 19:15:00.390089 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.390271 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:00 crc kubenswrapper[4982]: E1205 19:15:00.390619 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.390477 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:00 crc kubenswrapper[4982]: E1205 19:15:00.391010 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.390309 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:00 crc kubenswrapper[4982]: E1205 19:15:00.391402 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.478389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.478679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.478892 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.479128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.479367 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.581451 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.581714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.581780 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.581841 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.581955 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.685322 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.685350 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.685357 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.685370 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.685379 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.788077 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.788205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.788233 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.788280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.788304 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.891612 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.891694 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.891728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.891760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.891784 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.994715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.994779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.994802 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.994833 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.994856 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:00Z","lastTransitionTime":"2025-12-05T19:15:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.995715 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/3.log" Dec 05 19:15:00 crc kubenswrapper[4982]: I1205 19:15:00.996851 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/2.log" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.002588 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" exitCode=1 Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.002635 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.002675 4982 scope.go:117] "RemoveContainer" containerID="a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.004313 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:15:01 crc kubenswrapper[4982]: E1205 19:15:01.004792 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.025854 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.040895 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.056829 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.075231 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.090479 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.097904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.097970 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.097986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.098511 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.098565 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.101858 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.117236 4982 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b0
98d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.139192 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.156704 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.170266 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.193940 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea
358a399276c3923ea0de9682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:15:00Z\\\",\\\"message\\\":\\\"r 12 objects: [openshift-machine-config-operator/machine-config-daemon-dldj9 openshift-multus/multus-8ldph openshift-multus/network-metrics-daemon-6r5ns openshift-network-diagnostics/network-check-target-xd92c openshift-dns/node-resolver-tlc5r openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-multus/multus-additional-cni-plugins-7q67q openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-etcd/etcd-crc openshift-image-registry/node-ca-hbm8m openshift-ovn-kubernetes/ovnkube-node-xlt6h openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1205 19:15:00.399330 6965 services_controller.go:454] Service openshift-marketplace/community-operators for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1205 19:15:00.399350 6965 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has 
stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d
7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.202217 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.202395 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.202487 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.202572 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.202676 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.208901 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.222224 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.234195 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.249318 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.262064 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 
19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.281860 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"lo
g-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reas
on\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.295492 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.305132 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.305174 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.305184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.305198 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.305210 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.404460 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.408346 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.408415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.408441 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.408471 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.408495 4982 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.427745 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.443115 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.466946 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866b
e30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.489639 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when 
the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511198 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511237 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511252 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511272 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511286 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.511714 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.529932 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.546919 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.565741 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.587325 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.602727 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.613543 4982 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.613601 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.613620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.613643 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.613662 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.619539 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.643959 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.666465 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.687893 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.702722 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.717307 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.717387 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.717414 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.717443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.717467 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.730277 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.763044 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea
358a399276c3923ea0de9682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1dd25c49b7e1c02a1f8bca65673e7a0d14e75bef568e254225aef77296019ef\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"message\\\":\\\"n-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 19:14:34.360421 6626 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 19:14:34.360703 6626 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1205 19:14:34.359871 6626 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 19:14:34.360938 6626 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 19:14:34.360964 6626 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 19:14:34.361040 6626 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 19:14:34.361099 6626 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 19:14:34.361110 6626 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 19:14:34.361130 6626 factory.go:656] Stopping watch factory\\\\nI1205 19:14:34.361162 6626 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 19:14:34.361173 6626 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 19:14:34.361181 6626 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 19:14:34.361405 6626 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:33Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:15:00Z\\\",\\\"message\\\":\\\"r 12 objects: [openshift-machine-config-operator/machine-config-daemon-dldj9 openshift-multus/multus-8ldph openshift-multus/network-metrics-daemon-6r5ns openshift-network-diagnostics/network-check-target-xd92c openshift-dns/node-resolver-tlc5r openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-multus/multus-additional-cni-plugins-7q67q openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-etcd/etcd-crc openshift-image-registry/node-ca-hbm8m openshift-ovn-kubernetes/ovnkube-node-xlt6h openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1205 19:15:00.399330 6965 services_controller.go:454] Service openshift-marketplace/community-operators for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1205 19:15:00.399350 6965 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has 
stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d
7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:01Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.819561 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.819613 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.819627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.819644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.819655 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.923927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.924070 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.924103 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.924128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:01 crc kubenswrapper[4982]: I1205 19:15:01.924184 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:01Z","lastTransitionTime":"2025-12-05T19:15:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.010736 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/3.log" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.016416 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:15:02 crc kubenswrapper[4982]: E1205 19:15:02.016698 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.028242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.028315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.028339 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.028371 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.028396 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.034208 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.068339 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea
358a399276c3923ea0de9682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:15:00Z\\\",\\\"message\\\":\\\"r 12 objects: [openshift-machine-config-operator/machine-config-daemon-dldj9 openshift-multus/multus-8ldph openshift-multus/network-metrics-daemon-6r5ns openshift-network-diagnostics/network-check-target-xd92c openshift-dns/node-resolver-tlc5r openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-multus/multus-additional-cni-plugins-7q67q openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-etcd/etcd-crc openshift-image-registry/node-ca-hbm8m openshift-ovn-kubernetes/ovnkube-node-xlt6h openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1205 19:15:00.399330 6965 services_controller.go:454] Service openshift-marketplace/community-operators for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1205 19:15:00.399350 6965 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.087675 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.103920 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.124269 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.131397 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.131458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc 
kubenswrapper[4982]: I1205 19:15:02.131476 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.131501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.131520 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.140384 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:1
4:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.161558 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":t
rue,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o:
//769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.173714 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.186913 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.197381 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.211473 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.221521 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.232995 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.235508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.235608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.235675 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.235788 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.235860 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.244998 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.264228 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.279006 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.295471 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.305560 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:02Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.338186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.338242 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.338254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.338276 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.338291 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.389899 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.389908 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.390316 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.390443 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:02 crc kubenswrapper[4982]: E1205 19:15:02.390454 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:02 crc kubenswrapper[4982]: E1205 19:15:02.390741 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:02 crc kubenswrapper[4982]: E1205 19:15:02.390667 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:02 crc kubenswrapper[4982]: E1205 19:15:02.390702 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.443036 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.443118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.443196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.443232 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.443255 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.546685 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.547107 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.547325 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.547487 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.547724 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.651417 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.651471 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.651482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.651496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.651507 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.754472 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.754538 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.754557 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.754581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.754598 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.858165 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.858218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.858231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.858248 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.858260 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.961340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.961410 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.961432 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.961459 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:02 crc kubenswrapper[4982]: I1205 19:15:02.961479 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:02Z","lastTransitionTime":"2025-12-05T19:15:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.064348 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.064415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.064435 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.064462 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.064488 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.168017 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.168088 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.168105 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.168128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.168176 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.271821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.271887 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.271904 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.271927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.271944 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.375467 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.375524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.375541 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.375563 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.375579 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.402779 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.477690 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.477742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.477755 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.477771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.477782 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.579930 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.580014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.580037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.580069 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.580092 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.682679 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.682723 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.682735 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.682752 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.682764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.785635 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.785684 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.785695 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.785713 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.785726 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.889139 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.889247 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.889268 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.889293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.889312 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.992124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.992213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.992233 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.992257 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:03 crc kubenswrapper[4982]: I1205 19:15:03.992273 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:03Z","lastTransitionTime":"2025-12-05T19:15:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.095844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.095952 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.095980 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.096014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.096041 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.198852 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.198908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.198923 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.198943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.198967 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.302028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.302079 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.302093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.302111 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.302123 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.389460 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.389520 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.389596 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.389501 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:04 crc kubenswrapper[4982]: E1205 19:15:04.389681 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:04 crc kubenswrapper[4982]: E1205 19:15:04.389799 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:04 crc kubenswrapper[4982]: E1205 19:15:04.389890 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:04 crc kubenswrapper[4982]: E1205 19:15:04.390056 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.406466 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.406526 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.406540 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.406559 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.406574 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.509445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.509482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.509491 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.509505 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.509514 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.613121 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.613216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.613235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.613263 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.613284 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.716829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.716877 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.716889 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.716912 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.716924 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.819597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.819627 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.819636 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.819648 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.819657 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.922717 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.922771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.922790 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.922816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:04 crc kubenswrapper[4982]: I1205 19:15:04.922852 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:04Z","lastTransitionTime":"2025-12-05T19:15:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.025042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.025123 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.025181 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.025213 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.025233 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.128194 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.128262 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.128280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.128323 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.128342 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.231245 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.231300 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.231316 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.231338 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.231355 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.334143 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.334294 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.334315 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.334378 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.334400 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.438548 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.438663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.438685 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.438710 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.438728 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.541724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.541821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.541838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.541858 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.541871 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.645135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.645192 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.645205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.645225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.645240 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.749095 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.749209 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.749238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.749270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.749291 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.853488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.853576 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.853608 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.853639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.853660 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.956066 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.956134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.956176 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.956191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:05 crc kubenswrapper[4982]: I1205 19:15:05.956227 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:05Z","lastTransitionTime":"2025-12-05T19:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.059125 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.059169 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.059177 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.059189 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.059198 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.161455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.161482 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.161489 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.161502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.161510 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.265037 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.265353 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.265390 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.265420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.265446 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.369295 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.369378 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.369396 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.369420 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.369437 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.390023 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.390082 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.390125 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:06 crc kubenswrapper[4982]: E1205 19:15:06.390280 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.390296 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:06 crc kubenswrapper[4982]: E1205 19:15:06.390413 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:06 crc kubenswrapper[4982]: E1205 19:15:06.390587 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:06 crc kubenswrapper[4982]: E1205 19:15:06.390677 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.471687 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.471821 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.471838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.471856 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.471867 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.575231 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.575293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.575304 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.575344 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.575360 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.678791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.678836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.678845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.678860 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.678869 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.782768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.782841 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.782858 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.782922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.782939 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.885427 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.885487 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.885508 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.885532 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.885550 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.989726 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.989794 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.989815 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.989839 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:06 crc kubenswrapper[4982]: I1205 19:15:06.989857 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:06Z","lastTransitionTime":"2025-12-05T19:15:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.092330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.092391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.092407 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.092431 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.092449 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.196070 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.196119 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.196136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.196196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.196214 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.298860 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.298940 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.298958 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.299014 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.299034 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.402032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.402116 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.402136 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.402192 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.402213 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.505342 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.505388 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.505403 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.505422 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.505437 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.608949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.609027 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.609042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.609063 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.609077 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.711582 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.711658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.711673 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.711698 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.711719 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.814212 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.814300 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.814312 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.814328 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.814363 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.917817 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.917902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.917987 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.918059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:07 crc kubenswrapper[4982]: I1205 19:15:07.918082 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:07Z","lastTransitionTime":"2025-12-05T19:15:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.021572 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.021622 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.021639 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.021661 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.021679 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.127012 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.127095 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.127120 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.127180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.127220 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.230602 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.230663 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.230682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.230704 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.230721 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.292572 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.292861 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.292812401 +0000 UTC m=+151.174698396 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.334055 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.334142 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.334196 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.334225 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.334245 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.389486 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.389569 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.389650 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.389749 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.389775 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.389961 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.390005 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.390107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.394341 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.394674 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.394761 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.394819 4982 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.394832 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.394778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.394856 4982 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 
19:15:08.394880 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.394860698 +0000 UTC m=+151.276746703 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.394899 4982 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.395137 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395233 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.395204157 +0000 UTC m=+151.277090362 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395263 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.395246058 +0000 UTC m=+151.277132053 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395275 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395311 4982 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395333 4982 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:15:08 crc kubenswrapper[4982]: E1205 19:15:08.395448 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.395409012 +0000 UTC m=+151.277295047 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.436722 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.436786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.436800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.436825 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.436837 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.539891 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.539949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.539971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.539998 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.540022 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.642741 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.642795 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.642814 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.642838 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.642856 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.746574 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.746662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.746686 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.746715 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.746736 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.850116 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.850200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.850216 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.850235 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.850249 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.953605 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.953686 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.953710 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.953742 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:08 crc kubenswrapper[4982]: I1205 19:15:08.953780 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:08Z","lastTransitionTime":"2025-12-05T19:15:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.056724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.056783 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.056796 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.056819 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.056832 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.160283 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.160369 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.160393 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.160419 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.160436 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.189519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.189614 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.189634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.189664 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.189684 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.210406 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:09Z is after 
2025-08-24T17:21:41Z" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.216519 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.216610 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.216636 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.216710 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.216764 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.238602 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:09Z is after 
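The repeated "Error updating node status, will retry" failures above share a single root cause: the node-identity webhook at 127.0.0.1:9743 serves a TLS certificate that expired on 2025-08-24T17:21:41Z, months before the timestamps in this log. A minimal Go sketch of the same check the kubelet's x509 verification is failing (illustrative only, not part of the kubelet; it assumes it is run on the node itself, where the port from the log is reachable):

    // certcheck.go: print the validity window of the TLS certificate served
    // by the webhook endpoint named in the log (assumed: 127.0.0.1:9743).
    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
        "time"
    )

    func main() {
        // Skip chain verification so an already-expired certificate can still
        // be inspected; the kubelet verifies it and fails with the x509 error.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("subject:   %s\n", cert.Subject)
        fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
        fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            // This is the state reported throughout the log.
            fmt.Println("certificate has expired")
        }
    }
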
2025-08-24T17:21:41Z" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.243868 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.243921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.243936 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.243959 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.243976 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.260553 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:09Z is after 
2025-08-24T17:21:41Z" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.264792 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.264849 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.264867 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.264891 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.264908 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.281805 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:09Z is after 
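Independently of the webhook failure, the Ready condition stays False throughout this window because the kubelet finds no CNI network configuration. A minimal Go sketch of that presence check (illustrative only; it assumes the /etc/kubernetes/cni/net.d path from the messages above, and the real kubelet additionally parses and validates the files it finds):

    // cnicheck.go: report whether any CNI network configuration is present,
    // mirroring the condition behind "NetworkPluginNotReady" in this log.
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // directory named in the kubelet message
        // Match both .conf and .conflist files.
        matches, err := filepath.Glob(filepath.Join(confDir, "*.conf*"))
        if err != nil {
            fmt.Println("glob error:", err)
            return
        }
        if len(matches) == 0 {
            // This is the state the kubelet is reporting: NetworkReady=false.
            fmt.Printf("no CNI configuration file in %s\n", confDir)
            if _, statErr := os.Stat(confDir); os.IsNotExist(statErr) {
                fmt.Println("(directory does not exist)")
            }
            return
        }
        for _, m := range matches {
            fmt.Println("found:", m)
        }
    }
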
2025-08-24T17:21:41Z" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.287265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.287347 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.287358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.287381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.287395 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.304164 4982 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404556Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865356Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T19:15:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c738741a-5d3a-4a2b-9f49-09675e56a75b\\\",\\\"systemUUID\\\":\\\"3fb999fe-b94a-4144-86b7-b9a7445c3e37\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:09Z is after 
2025-08-24T17:21:41Z" Dec 05 19:15:09 crc kubenswrapper[4982]: E1205 19:15:09.304328 4982 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.306280 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.306318 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.306342 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.306360 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.306375 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.408943 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.408977 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.408986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.408999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.409007 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.511708 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.511772 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.511789 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.511813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.511828 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.615049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.615118 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.615135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.615200 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.615213 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.718031 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.718102 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.718128 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.718187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.718205 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.821449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.821501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.821529 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.821549 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.821563 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.923879 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.923925 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.923934 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.923949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:09 crc kubenswrapper[4982]: I1205 19:15:09.923959 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:09Z","lastTransitionTime":"2025-12-05T19:15:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.026728 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.026791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.026813 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.026836 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.026853 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.129515 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.129575 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.129597 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.129621 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.129642 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.232908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.232989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.233007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.233029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.233046 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.336991 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.337035 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.337043 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.337058 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.337067 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.390058 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.390129 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.390058 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.390245 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:10 crc kubenswrapper[4982]: E1205 19:15:10.390280 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:10 crc kubenswrapper[4982]: E1205 19:15:10.390391 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:10 crc kubenswrapper[4982]: E1205 19:15:10.390568 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:10 crc kubenswrapper[4982]: E1205 19:15:10.390685 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.441034 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.441093 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.441110 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.441135 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.441186 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.544696 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.544745 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.544760 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.544781 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.544797 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.648352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.648867 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.648886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.648910 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.648928 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.751755 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.751822 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.751839 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.751863 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.751879 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.854767 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.854852 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.854872 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.854902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.854924 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.957854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.957910 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.957926 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.957950 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:10 crc kubenswrapper[4982]: I1205 19:15:10.957987 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:10Z","lastTransitionTime":"2025-12-05T19:15:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.060412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.060475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.060491 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.060512 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.060531 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.163964 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.164021 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.164032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.164049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.164062 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.267676 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.267724 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.267745 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.267774 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.267799 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.371261 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.371337 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.371358 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.371381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.371397 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
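[editor's aside] Every status patch below is rejected for the same reason visible above: the network-node-identity webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the node's current clock (2025-12-05), so the kubelet's TLS handshake fails with "x509: certificate has expired or is not yet valid". The following is a minimal, hypothetical Go probe (not part of this log or of OpenShift tooling) that dials the webhook address seen in the log, 127.0.0.1:9743, and prints the certificate validity window that the handshake is checking:

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Skip verification so the handshake succeeds even with an expired
        // certificate; we only want to inspect it, not trust it.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        // The leaf (serving) certificate is first in the peer chain.
        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("NotBefore: %s\nNotAfter:  %s\n", cert.NotBefore, cert.NotAfter)
        // Mirrors the log's error condition: "current time ... is after ..."
        fmt.Printf("expired: %v\n", time.Now().After(cert.NotAfter))
    }

Run against a node in the state captured here, this would report expired: true, matching the webhook failures that follow.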
Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.407922 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-tlc5r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bc220b96-80bd-4c1e-99c7-a39446465bfd\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0165e4806c22abaea10e9b05429d2ef21d103db8aee1b8fbee0ddb9cbb72c235\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cdxc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:09Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-tlc5r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z"
Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.436889 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-7q67q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c16040d8-4ec1-41d8-8462-03c5d07aa6b9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2d9e1345e57ccce57a325c30835dfe937dceeb98bf3d74d657bc872292d36b95\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c4bb99540f84fe2f608d64fda4410bbffd204eef6828ae26d89b835f2cfa6df4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://19cfc5042204ca988c616a106e498b70c8033dd6a2ec6c7919b61e12ab196b25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4896c3ed955c0f893f8660a011e08e240416ccfa57176d9068aa90aeb642c47a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e0ef5816f9a49d277dc1d37260fa807a7495ef6a1fe7c604fa5628a5739da521\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8de26bb4942f3ac547cef5668008ae0d7075ea5a420bc6a4042c8452a3ab5545\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd9cfff949bd02cf31cf89d364aa0341e0fa1538f21117b931ca0c3f1105787\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-59mx6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-7q67q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.459561 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ade8a138-629c-4ff7-8c39-c86ab2bb6a98\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2332d0dcad4d12d48f19506ace0eadeafa6848e6caf2da83b1f8e369a77e41a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa49ecda93047f7fdc8b0de54465fb47f6e2833dc37eb6d38a39c8f730da9242\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xtbsz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-j2bqj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 
19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.473477 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d79aaa81-9c19-41c8-b983-d74b032a3520\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://debf150e35afe2a1027eaa61b2b2730a48faaad43f1bcf3537cad511c9d4b14e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d0f52f5535022ad363e090d1f9e0015b8dd80610a7e792b4b81ee07aa2dccd74\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d0f52f5535022ad363e090d1f9e0015b8dd80610a7e792b4b81ee07aa2dccd74\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.474477 4982 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.474531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.474543 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.474560 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.474572 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.509224 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f4724ba2-bd4d-43be-a333-82e4d8bb1b07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://06e56f6aefc09fb35a1c522455469211e41ad9652856f8bd4a14c7b933c7f367\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://34ddb0ff8e429e3da1548dc29971b82d2f348905efa1dd125387f14a72d7ed1f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\
\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e596e0a2907b7ff5b5414bfb92afff775ccb93288a47f58c89906743a740f6b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98d6e13af205da14b2b121aa634d01d9edd4744a79395c5f9147391fd3079d76\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://79858caad081d829fd33dcaf9554cfd2d45d00e8f842e2a3486f8412439c5643\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termina
ted\\\":{\\\"containerID\\\":\\\"cri-o://769f81512b5a5b29d71fb12cbfa9d8a577ef425b6aaf1e149f915e5242327967\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8acee3076d2270082c99bdd12a80c973dfebd22444e7fc3cbcba0122435359f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8bf2864dc3f00cc882b6c4b6d34d6b99932e0adde36471136d58794c8ff326bf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.523000 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.538486 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.549777 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f1b3db73687952b68e334a691dca0be0feccfff2551be0075a8a039e0dad98a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.565067 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8bf9520c-1e7a-450c-a6d5-b460ee7917a5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8440fc2bc53456ba97b241a2cd1e5f5fd7a9180f7ee13dcb33b060dee4fe7409\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e40c3074e615a7b64cd2bc59396535af19411ab4c7c2e18fc24021b895035b6\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4381bbd8534d587c35d5af524fbe3b139e44c48e460a8d703ff9f05b632c2d38\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.578444 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.578492 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.578503 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.578521 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.578535 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.579347 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.594426 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8ldph" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"62e7c34f-d411-481e-a5bb-885e7cbd4326\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:14:56Z\\\",\\\"message\\\":\\\"2025-12-05T19:14:11+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08\\\\n2025-12-05T19:14:11+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_e84bbf1f-1124-4ca1-a29f-45d202c43d08 to /host/opt/cni/bin/\\\\n2025-12-05T19:14:11Z [verbose] multus-daemon started\\\\n2025-12-05T19:14:11Z [verbose] Readiness Indicator file check\\\\n2025-12-05T19:14:56Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-v2rt4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8ldph\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.608208 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6902f814-e220-41f2-887a-1831b28c14ee\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf561d658fadf92817939c5b0449d593631cc54a606fa24e7f4c0f0a25c4652a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ztpvh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dldj9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.619821 4982 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-hbm8m" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"363b9582-fba3-4516-b66d-4623a292a882\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://516ddb770bef6507f8782ef93dfafbe19dc4502d89452a372b0ecdb38bf160bc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tr96s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:12Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-hbm8m\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.640388 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e316779-c566-4497-b5dc-74bd06c3a798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T19:14:04Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 19:13:53.766512 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 19:13:53.768506 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3751488188/tls.crt::/tmp/serving-cert-3751488188/tls.key\\\\\\\"\\\\nI1205 19:14:04.089669 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 19:14:04.100029 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 19:14:04.100074 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 19:14:04.100143 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 19:14:04.100192 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 19:14:04.108828 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1205 19:14:04.109050 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1205 19:14:04.109071 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109081 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 19:14:04.109088 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 19:14:04.109092 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 19:14:04.109097 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 19:14:04.109101 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1205 19:14:04.113823 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.660056 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d9c515e08149ef458809e4134c03664a578f2e39c5c5bda53432ba1f1d62542\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.676730 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://80eb32253f12e7b29d7ab55d333dc3f60bcca38b87076a7a9e341a813bef058e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://284a8422001277d05f73e8220a720c97a696bf3a8aef2f7be729093da8fa72e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.683799 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.683872 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.683898 4982 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.683931 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.683954 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.691364 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d68836a-462d-4364-bc12-b530a7cb0727\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-zqd8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:24Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6r5ns\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.706847 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c34cc4f7-08ac-4266-b651-3926d3970c4d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:13:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://fdecba63a5b8fd4acc03e2440acafeabcdefcdb278f037117c7b5de2cf445e79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9cc02cf255b6f308b2b5209ba4b2a4d0ccf9e6ba461ddde390eb6ee53ea78c14\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://71882afeb562c377bde2acd7f21f8e3431eb531d7f9f07930ae174dd34f60940\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:13:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e1a53d8f70eb66f33c1359dc85460851dc2f3a3ece1ebe96b4f6cca14300f1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:13:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:13:42Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:13:41Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.739626 4982 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:11Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea
358a399276c3923ea0de9682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T19:15:00Z\\\",\\\"message\\\":\\\"r 12 objects: [openshift-machine-config-operator/machine-config-daemon-dldj9 openshift-multus/multus-8ldph openshift-multus/network-metrics-daemon-6r5ns openshift-network-diagnostics/network-check-target-xd92c openshift-dns/node-resolver-tlc5r openshift-kube-scheduler/openshift-kube-scheduler-crc openshift-multus/multus-additional-cni-plugins-7q67q openshift-network-operator/network-operator-58b4c7f79c-55gtf openshift-etcd/etcd-crc openshift-image-registry/node-ca-hbm8m openshift-ovn-kubernetes/ovnkube-node-xlt6h openshift-network-node-identity/network-node-identity-vrzqb]\\\\nI1205 19:15:00.399330 6965 services_controller.go:454] Service openshift-marketplace/community-operators for network=default has 1 cluster-wide, 0 per-node configs, 0 template configs, making 1 (cluster) 0 (per node) and 0 (template) load balancers\\\\nF1205 19:15:00.399350 6965 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T19:14:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T19:14:10Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T19:14:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sbkcn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T19:14:10Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-xlt6h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T19:15:11Z is after 2025-08-24T17:21:41Z" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.786990 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.787064 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.787089 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.787122 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.787182 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.890791 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.890847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.890865 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.890884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.890898 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.994443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.994503 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.994516 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.994539 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:11 crc kubenswrapper[4982]: I1205 19:15:11.994558 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:11Z","lastTransitionTime":"2025-12-05T19:15:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.098736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.098786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.098800 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.098826 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.098841 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.202035 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.202251 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.202293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.202330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.202353 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.304559 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.304594 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.304603 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.304616 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.304626 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.389562 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.389573 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:12 crc kubenswrapper[4982]: E1205 19:15:12.389708 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.389590 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:12 crc kubenswrapper[4982]: E1205 19:15:12.389764 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.389565 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:12 crc kubenswrapper[4982]: E1205 19:15:12.389849 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:12 crc kubenswrapper[4982]: E1205 19:15:12.389913 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.407688 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.407716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.407723 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.407738 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.407747 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.511697 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.511762 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.511786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.511816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.511839 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.615141 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.615243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.615260 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.615285 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.615303 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.718191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.718238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.718253 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.718270 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.718281 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.821764 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.821847 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.821881 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.821918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.821940 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.925786 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.925844 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.925862 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.925888 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:12 crc kubenswrapper[4982]: I1205 19:15:12.925904 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:12Z","lastTransitionTime":"2025-12-05T19:15:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.028629 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.028682 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.028692 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.028711 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.028721 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.132452 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.132529 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.132545 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.132572 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.132594 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.235818 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.235902 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.235927 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.235961 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.235982 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.338546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.338611 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.338631 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.338662 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.338682 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.442076 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.442134 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.442145 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.442184 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.442199 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.545961 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.546340 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.546853 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.547366 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.547687 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.650714 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.650969 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.651112 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.651288 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.651455 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.754455 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.754507 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.754524 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.754547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.754564 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.858003 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.858289 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.858423 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.858546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.858668 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.961635 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.961716 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.961736 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.961766 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:13 crc kubenswrapper[4982]: I1205 19:15:13.961786 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:13Z","lastTransitionTime":"2025-12-05T19:15:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.065087 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.065145 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.065218 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.065252 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.065273 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.168123 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.168219 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.168238 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.168262 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.168280 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.271380 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.271445 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.271461 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.271485 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.271507 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.373971 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.373999 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.374007 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.374019 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.374029 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.389608 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.389812 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.389967 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.390032 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:14 crc kubenswrapper[4982]: E1205 19:15:14.390255 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:14 crc kubenswrapper[4982]: E1205 19:15:14.390486 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:14 crc kubenswrapper[4982]: E1205 19:15:14.390642 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:14 crc kubenswrapper[4982]: E1205 19:15:14.391294 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.477659 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.477721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.477732 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.477750 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.477767 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.580317 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.580381 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.580398 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.580424 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.580441 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.683274 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.683334 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.683359 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.683389 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.683409 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.785893 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.785945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.785962 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.785986 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.786002 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.887966 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.888030 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.888053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.888081 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.888104 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.990812 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.990886 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.990909 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.990939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:14 crc kubenswrapper[4982]: I1205 19:15:14.990961 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:14Z","lastTransitionTime":"2025-12-05T19:15:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.093837 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.093896 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.093914 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.093937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.093953 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.196845 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.196918 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.196937 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.196963 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.196985 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.299889 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.299922 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.299934 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.299949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.299962 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.402028 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.402097 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.402115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.402141 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.402205 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.505379 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.505413 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.505428 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.505449 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.505465 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.608440 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.608493 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.608509 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.608531 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.608548 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.711429 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.711963 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.712002 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.712033 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.712055 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.814964 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.815018 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.815032 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.815049 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.815060 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.918921 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.918989 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.919013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.919042 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:15 crc kubenswrapper[4982]: I1205 19:15:15.919062 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:15Z","lastTransitionTime":"2025-12-05T19:15:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.021657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.021747 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.021771 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.021803 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.021827 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.125405 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.125477 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.125496 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.125523 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.125542 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.228806 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.228884 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.228908 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.228939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.228967 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.331898 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.331945 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.331957 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.331974 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.331986 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.389941 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.390262 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:16 crc kubenswrapper[4982]: E1205 19:15:16.390266 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.390311 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.390433 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:16 crc kubenswrapper[4982]: E1205 19:15:16.390909 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:16 crc kubenswrapper[4982]: E1205 19:15:16.391034 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:16 crc kubenswrapper[4982]: E1205 19:15:16.391099 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.391573 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:15:16 crc kubenswrapper[4982]: E1205 19:15:16.391900 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.434642 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.434705 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.434721 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.434744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.434760 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.537782 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.537820 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.537829 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.537841 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.537849 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.640385 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.640421 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.640430 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.640443 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.640452 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.743474 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.743516 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.743530 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.743547 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.743590 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.846816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.846864 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.846878 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.846896 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.846908 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.949739 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.949785 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.949806 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.949859 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:16 crc kubenswrapper[4982]: I1205 19:15:16.949887 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:16Z","lastTransitionTime":"2025-12-05T19:15:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.052262 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.052297 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.052305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.052318 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.052327 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.155222 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.155293 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.155305 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.155321 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.155331 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.258306 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.258374 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.258391 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.258415 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.258434 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.361744 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.361795 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.361842 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.361865 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.361882 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.464555 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.464595 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.464620 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.464635 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.464645 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.568324 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.568458 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.568477 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.568502 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.568549 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.671753 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.671816 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.671841 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.671870 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.671891 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.774954 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.775011 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.775029 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.775053 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.775070 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.878832 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.878916 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.878939 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.878968 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.878994 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.982406 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.982469 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.982488 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.982513 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:17 crc kubenswrapper[4982]: I1205 19:15:17.982531 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:17Z","lastTransitionTime":"2025-12-05T19:15:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.084475 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.084525 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.084546 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.084568 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.084582 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.188076 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.188170 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.188186 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.188203 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.188216 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.290584 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.290634 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.290644 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.290658 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.290665 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.389949 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.389978 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.390031 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:18 crc kubenswrapper[4982]: E1205 19:15:18.390220 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.390522 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:18 crc kubenswrapper[4982]: E1205 19:15:18.390616 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:18 crc kubenswrapper[4982]: E1205 19:15:18.390817 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:18 crc kubenswrapper[4982]: E1205 19:15:18.391326 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.397501 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.397542 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.397562 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.397581 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.397596 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.500124 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.500187 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.500195 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.500207 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.500215 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.602609 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.603425 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.603549 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.603657 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.603858 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.707479 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.707854 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.708059 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.708307 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.708496 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.812180 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.812226 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.812243 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.812265 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.812282 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.915051 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.915098 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.915115 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.915189 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:18 crc kubenswrapper[4982]: I1205 19:15:18.915221 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:18Z","lastTransitionTime":"2025-12-05T19:15:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.018352 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.018412 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.018434 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.018462 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.018486 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:19Z","lastTransitionTime":"2025-12-05T19:15:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.122330 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.122528 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.123205 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.123275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.123294 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:19Z","lastTransitionTime":"2025-12-05T19:15:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.226191 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.226254 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.226275 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.226298 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.226316 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:19Z","lastTransitionTime":"2025-12-05T19:15:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.328949 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.328990 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.328998 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.329013 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.329022 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:19Z","lastTransitionTime":"2025-12-05T19:15:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.382725 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.382768 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.382779 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.382795 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.382807 4982 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T19:15:19Z","lastTransitionTime":"2025-12-05T19:15:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.440019 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb"] Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.440434 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.444860 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.445011 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.446015 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.446131 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.450279 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.450315 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.450339 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89cca5a1-1d91-4990-ad39-a3393b5da9a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.450404 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89cca5a1-1d91-4990-ad39-a3393b5da9a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.450424 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89cca5a1-1d91-4990-ad39-a3393b5da9a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.517859 4982 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=75.517841902 podStartE2EDuration="1m15.517841902s" podCreationTimestamp="2025-12-05 19:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.501008964 +0000 UTC m=+98.382895029" watchObservedRunningTime="2025-12-05 19:15:19.517841902 +0000 UTC m=+98.399727907" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.533622 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=45.533603043 podStartE2EDuration="45.533603043s" podCreationTimestamp="2025-12-05 19:14:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.533222863 +0000 UTC m=+98.415108868" watchObservedRunningTime="2025-12-05 19:15:19.533603043 +0000 UTC m=+98.415489038" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.551770 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89cca5a1-1d91-4990-ad39-a3393b5da9a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.551834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.551856 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.551876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89cca5a1-1d91-4990-ad39-a3393b5da9a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.551938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89cca5a1-1d91-4990-ad39-a3393b5da9a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.552428 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: 
\"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.552461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/89cca5a1-1d91-4990-ad39-a3393b5da9a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.552951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/89cca5a1-1d91-4990-ad39-a3393b5da9a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.568740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/89cca5a1-1d91-4990-ad39-a3393b5da9a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.582590 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/89cca5a1-1d91-4990-ad39-a3393b5da9a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-bjjmb\" (UID: \"89cca5a1-1d91-4990-ad39-a3393b5da9a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.642187 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-tlc5r" podStartSLOduration=70.64216885 podStartE2EDuration="1m10.64216885s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.642059278 +0000 UTC m=+98.523945273" watchObservedRunningTime="2025-12-05 19:15:19.64216885 +0000 UTC m=+98.524054855" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.675055 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-7q67q" podStartSLOduration=70.675039767 podStartE2EDuration="1m10.675039767s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.66403571 +0000 UTC m=+98.545921705" watchObservedRunningTime="2025-12-05 19:15:19.675039767 +0000 UTC m=+98.556925762" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.682966 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-j2bqj" podStartSLOduration=69.682949363 podStartE2EDuration="1m9.682949363s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.674660587 +0000 UTC m=+98.556546582" watchObservedRunningTime="2025-12-05 19:15:19.682949363 +0000 UTC m=+98.564835358" Dec 05 19:15:19 crc 
kubenswrapper[4982]: I1205 19:15:19.683129 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=16.683122327 podStartE2EDuration="16.683122327s" podCreationTimestamp="2025-12-05 19:15:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.682620324 +0000 UTC m=+98.564506319" watchObservedRunningTime="2025-12-05 19:15:19.683122327 +0000 UTC m=+98.565008342" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.724239 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=75.724215217 podStartE2EDuration="1m15.724215217s" podCreationTimestamp="2025-12-05 19:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.711505276 +0000 UTC m=+98.593391301" watchObservedRunningTime="2025-12-05 19:15:19.724215217 +0000 UTC m=+98.606101222" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.736096 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8ldph" podStartSLOduration=70.736078356 podStartE2EDuration="1m10.736078356s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.73545821 +0000 UTC m=+98.617344225" watchObservedRunningTime="2025-12-05 19:15:19.736078356 +0000 UTC m=+98.617964351" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.746325 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podStartSLOduration=70.746310323 podStartE2EDuration="1m10.746310323s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.744112035 +0000 UTC m=+98.625998030" watchObservedRunningTime="2025-12-05 19:15:19.746310323 +0000 UTC m=+98.628196318" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.754822 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" Dec 05 19:15:19 crc kubenswrapper[4982]: W1205 19:15:19.768680 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod89cca5a1_1d91_4990_ad39_a3393b5da9a7.slice/crio-879bfbfb4a24a440da9a92ef8a88a5a1888b482b8f3562fff54bbdd535c2f228 WatchSource:0}: Error finding container 879bfbfb4a24a440da9a92ef8a88a5a1888b482b8f3562fff54bbdd535c2f228: Status 404 returned error can't find the container with id 879bfbfb4a24a440da9a92ef8a88a5a1888b482b8f3562fff54bbdd535c2f228 Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.780837 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.780820412 podStartE2EDuration="1m15.780820412s" podCreationTimestamp="2025-12-05 19:14:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.780587395 +0000 UTC m=+98.662473390" watchObservedRunningTime="2025-12-05 19:15:19.780820412 +0000 UTC m=+98.662706407" Dec 05 19:15:19 crc kubenswrapper[4982]: I1205 19:15:19.781904 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-hbm8m" podStartSLOduration=70.78189835 podStartE2EDuration="1m10.78189835s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:19.753769997 +0000 UTC m=+98.635655992" watchObservedRunningTime="2025-12-05 19:15:19.78189835 +0000 UTC m=+98.663784345" Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.074733 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" event={"ID":"89cca5a1-1d91-4990-ad39-a3393b5da9a7","Type":"ContainerStarted","Data":"21c51dbd1ac904ead85bdaeea170253feaec5624cdcb14293cf68252164fff14"} Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.074779 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" event={"ID":"89cca5a1-1d91-4990-ad39-a3393b5da9a7","Type":"ContainerStarted","Data":"879bfbfb4a24a440da9a92ef8a88a5a1888b482b8f3562fff54bbdd535c2f228"} Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.098123 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-bjjmb" podStartSLOduration=71.098099775 podStartE2EDuration="1m11.098099775s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:20.096675498 +0000 UTC m=+98.978561503" watchObservedRunningTime="2025-12-05 19:15:20.098099775 +0000 UTC m=+98.979985810" Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.389959 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.389995 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.390102 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:20 crc kubenswrapper[4982]: E1205 19:15:20.390222 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:20 crc kubenswrapper[4982]: E1205 19:15:20.390333 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:20 crc kubenswrapper[4982]: I1205 19:15:20.390360 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:20 crc kubenswrapper[4982]: E1205 19:15:20.390485 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:20 crc kubenswrapper[4982]: E1205 19:15:20.390638 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:22 crc kubenswrapper[4982]: I1205 19:15:22.389621 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:22 crc kubenswrapper[4982]: I1205 19:15:22.389656 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:22 crc kubenswrapper[4982]: I1205 19:15:22.389697 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:22 crc kubenswrapper[4982]: I1205 19:15:22.389765 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:22 crc kubenswrapper[4982]: E1205 19:15:22.389916 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:22 crc kubenswrapper[4982]: E1205 19:15:22.390018 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:22 crc kubenswrapper[4982]: E1205 19:15:22.390126 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:22 crc kubenswrapper[4982]: E1205 19:15:22.390235 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:24 crc kubenswrapper[4982]: I1205 19:15:24.390023 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:24 crc kubenswrapper[4982]: E1205 19:15:24.390529 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:24 crc kubenswrapper[4982]: I1205 19:15:24.390047 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:24 crc kubenswrapper[4982]: I1205 19:15:24.390227 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:24 crc kubenswrapper[4982]: E1205 19:15:24.390807 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:24 crc kubenswrapper[4982]: I1205 19:15:24.390112 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:24 crc kubenswrapper[4982]: E1205 19:15:24.390962 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:24 crc kubenswrapper[4982]: E1205 19:15:24.391093 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:26 crc kubenswrapper[4982]: I1205 19:15:26.389764 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:26 crc kubenswrapper[4982]: I1205 19:15:26.389858 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:26 crc kubenswrapper[4982]: I1205 19:15:26.389789 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:26 crc kubenswrapper[4982]: I1205 19:15:26.389977 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:26 crc kubenswrapper[4982]: E1205 19:15:26.389969 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:26 crc kubenswrapper[4982]: E1205 19:15:26.390293 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:26 crc kubenswrapper[4982]: E1205 19:15:26.390321 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:26 crc kubenswrapper[4982]: E1205 19:15:26.390401 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:28 crc kubenswrapper[4982]: I1205 19:15:28.049963 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.050220 4982 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.050351 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs podName:4d68836a-462d-4364-bc12-b530a7cb0727 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:32.050315858 +0000 UTC m=+170.932201893 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs") pod "network-metrics-daemon-6r5ns" (UID: "4d68836a-462d-4364-bc12-b530a7cb0727") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 19:15:28 crc kubenswrapper[4982]: I1205 19:15:28.389730 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:28 crc kubenswrapper[4982]: I1205 19:15:28.389776 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.389867 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:28 crc kubenswrapper[4982]: I1205 19:15:28.389923 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.389984 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.390059 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:28 crc kubenswrapper[4982]: I1205 19:15:28.390081 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:28 crc kubenswrapper[4982]: E1205 19:15:28.390337 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:29 crc kubenswrapper[4982]: I1205 19:15:29.391419 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:15:29 crc kubenswrapper[4982]: E1205 19:15:29.391782 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-xlt6h_openshift-ovn-kubernetes(511e6b4b-3bb0-4288-9e2d-2d21485ef74c)\"" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" Dec 05 19:15:30 crc kubenswrapper[4982]: I1205 19:15:30.390130 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:30 crc kubenswrapper[4982]: I1205 19:15:30.390190 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:30 crc kubenswrapper[4982]: I1205 19:15:30.390278 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:30 crc kubenswrapper[4982]: I1205 19:15:30.390285 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:30 crc kubenswrapper[4982]: E1205 19:15:30.390427 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:30 crc kubenswrapper[4982]: E1205 19:15:30.390601 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:30 crc kubenswrapper[4982]: E1205 19:15:30.390725 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:30 crc kubenswrapper[4982]: E1205 19:15:30.390861 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:32 crc kubenswrapper[4982]: I1205 19:15:32.390375 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:32 crc kubenswrapper[4982]: I1205 19:15:32.390451 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:32 crc kubenswrapper[4982]: I1205 19:15:32.390460 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:32 crc kubenswrapper[4982]: I1205 19:15:32.390508 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:32 crc kubenswrapper[4982]: E1205 19:15:32.391565 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:32 crc kubenswrapper[4982]: E1205 19:15:32.391748 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:32 crc kubenswrapper[4982]: E1205 19:15:32.392096 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:32 crc kubenswrapper[4982]: E1205 19:15:32.392279 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:34 crc kubenswrapper[4982]: I1205 19:15:34.389795 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:34 crc kubenswrapper[4982]: I1205 19:15:34.389856 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:34 crc kubenswrapper[4982]: I1205 19:15:34.389893 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:34 crc kubenswrapper[4982]: I1205 19:15:34.390208 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:34 crc kubenswrapper[4982]: E1205 19:15:34.390212 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:34 crc kubenswrapper[4982]: E1205 19:15:34.390435 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:34 crc kubenswrapper[4982]: E1205 19:15:34.390529 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:34 crc kubenswrapper[4982]: E1205 19:15:34.390638 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:36 crc kubenswrapper[4982]: I1205 19:15:36.389145 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:36 crc kubenswrapper[4982]: I1205 19:15:36.389235 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:36 crc kubenswrapper[4982]: I1205 19:15:36.389187 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:36 crc kubenswrapper[4982]: I1205 19:15:36.389385 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:36 crc kubenswrapper[4982]: E1205 19:15:36.389494 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:36 crc kubenswrapper[4982]: E1205 19:15:36.389597 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:36 crc kubenswrapper[4982]: E1205 19:15:36.389984 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:36 crc kubenswrapper[4982]: E1205 19:15:36.390311 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:38 crc kubenswrapper[4982]: I1205 19:15:38.390013 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:38 crc kubenswrapper[4982]: I1205 19:15:38.390013 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:38 crc kubenswrapper[4982]: I1205 19:15:38.390040 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:38 crc kubenswrapper[4982]: E1205 19:15:38.390289 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:38 crc kubenswrapper[4982]: E1205 19:15:38.390384 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:38 crc kubenswrapper[4982]: I1205 19:15:38.390441 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:38 crc kubenswrapper[4982]: E1205 19:15:38.390525 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:38 crc kubenswrapper[4982]: E1205 19:15:38.390605 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:40 crc kubenswrapper[4982]: I1205 19:15:40.389851 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:40 crc kubenswrapper[4982]: I1205 19:15:40.389914 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:40 crc kubenswrapper[4982]: E1205 19:15:40.390103 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:40 crc kubenswrapper[4982]: I1205 19:15:40.390312 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:40 crc kubenswrapper[4982]: I1205 19:15:40.390425 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:40 crc kubenswrapper[4982]: E1205 19:15:40.390400 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:40 crc kubenswrapper[4982]: E1205 19:15:40.390578 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:40 crc kubenswrapper[4982]: E1205 19:15:40.390653 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:41 crc kubenswrapper[4982]: E1205 19:15:41.336012 4982 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 19:15:41 crc kubenswrapper[4982]: E1205 19:15:41.485448 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 19:15:42 crc kubenswrapper[4982]: I1205 19:15:42.389895 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:42 crc kubenswrapper[4982]: I1205 19:15:42.389975 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:42 crc kubenswrapper[4982]: I1205 19:15:42.390002 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:42 crc kubenswrapper[4982]: E1205 19:15:42.390035 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:42 crc kubenswrapper[4982]: I1205 19:15:42.389903 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:42 crc kubenswrapper[4982]: E1205 19:15:42.390192 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:42 crc kubenswrapper[4982]: E1205 19:15:42.390281 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:42 crc kubenswrapper[4982]: E1205 19:15:42.390348 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.162025 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/1.log" Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.163325 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/0.log" Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.163394 4982 generic.go:334] "Generic (PLEG): container finished" podID="62e7c34f-d411-481e-a5bb-885e7cbd4326" containerID="2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb" exitCode=1 Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.163440 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerDied","Data":"2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb"} Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.163494 4982 scope.go:117] "RemoveContainer" containerID="d92986d6e046878ebfb15f9cb7f460a0f2cd24ed73e81667dbb52aa3c400f016" Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.164087 4982 scope.go:117] "RemoveContainer" containerID="2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb" Dec 05 19:15:43 crc kubenswrapper[4982]: E1205 19:15:43.164401 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8ldph_openshift-multus(62e7c34f-d411-481e-a5bb-885e7cbd4326)\"" pod="openshift-multus/multus-8ldph" podUID="62e7c34f-d411-481e-a5bb-885e7cbd4326" Dec 05 19:15:43 crc kubenswrapper[4982]: I1205 19:15:43.391897 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.168292 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/3.log" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.171192 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerStarted","Data":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 
19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.172227 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.173649 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/1.log" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.241472 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podStartSLOduration=95.241437848 podStartE2EDuration="1m35.241437848s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:15:44.215886993 +0000 UTC m=+123.097773018" watchObservedRunningTime="2025-12-05 19:15:44.241437848 +0000 UTC m=+123.123323923" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.242943 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6r5ns"] Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.243123 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:44 crc kubenswrapper[4982]: E1205 19:15:44.243367 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.389359 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.389389 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:44 crc kubenswrapper[4982]: I1205 19:15:44.389443 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:44 crc kubenswrapper[4982]: E1205 19:15:44.389532 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:44 crc kubenswrapper[4982]: E1205 19:15:44.389671 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:44 crc kubenswrapper[4982]: E1205 19:15:44.389752 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:46 crc kubenswrapper[4982]: I1205 19:15:46.389422 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:46 crc kubenswrapper[4982]: I1205 19:15:46.389513 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:46 crc kubenswrapper[4982]: E1205 19:15:46.389586 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:46 crc kubenswrapper[4982]: E1205 19:15:46.389665 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:46 crc kubenswrapper[4982]: I1205 19:15:46.389736 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:46 crc kubenswrapper[4982]: E1205 19:15:46.389802 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:46 crc kubenswrapper[4982]: I1205 19:15:46.389841 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:46 crc kubenswrapper[4982]: E1205 19:15:46.389893 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:46 crc kubenswrapper[4982]: E1205 19:15:46.486744 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 19:15:48 crc kubenswrapper[4982]: I1205 19:15:48.389772 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:48 crc kubenswrapper[4982]: I1205 19:15:48.390019 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:48 crc kubenswrapper[4982]: I1205 19:15:48.389988 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:48 crc kubenswrapper[4982]: E1205 19:15:48.390119 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:48 crc kubenswrapper[4982]: I1205 19:15:48.389881 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:48 crc kubenswrapper[4982]: E1205 19:15:48.390237 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:48 crc kubenswrapper[4982]: E1205 19:15:48.390361 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:48 crc kubenswrapper[4982]: E1205 19:15:48.390444 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:49 crc kubenswrapper[4982]: I1205 19:15:49.433414 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:15:50 crc kubenswrapper[4982]: I1205 19:15:50.390105 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:50 crc kubenswrapper[4982]: I1205 19:15:50.390199 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:50 crc kubenswrapper[4982]: I1205 19:15:50.390224 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:50 crc kubenswrapper[4982]: E1205 19:15:50.390352 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:50 crc kubenswrapper[4982]: I1205 19:15:50.390488 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:50 crc kubenswrapper[4982]: E1205 19:15:50.390646 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:50 crc kubenswrapper[4982]: E1205 19:15:50.390817 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:50 crc kubenswrapper[4982]: E1205 19:15:50.390879 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:51 crc kubenswrapper[4982]: E1205 19:15:51.487662 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 19:15:52 crc kubenswrapper[4982]: I1205 19:15:52.389604 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:52 crc kubenswrapper[4982]: E1205 19:15:52.390354 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:52 crc kubenswrapper[4982]: I1205 19:15:52.389667 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:52 crc kubenswrapper[4982]: I1205 19:15:52.389664 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:52 crc kubenswrapper[4982]: I1205 19:15:52.389749 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:52 crc kubenswrapper[4982]: E1205 19:15:52.390965 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:52 crc kubenswrapper[4982]: E1205 19:15:52.391105 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:52 crc kubenswrapper[4982]: E1205 19:15:52.390990 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:54 crc kubenswrapper[4982]: I1205 19:15:54.389284 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:54 crc kubenswrapper[4982]: I1205 19:15:54.389335 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:54 crc kubenswrapper[4982]: I1205 19:15:54.389281 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:54 crc kubenswrapper[4982]: I1205 19:15:54.389479 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:54 crc kubenswrapper[4982]: E1205 19:15:54.389482 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:54 crc kubenswrapper[4982]: E1205 19:15:54.389563 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:54 crc kubenswrapper[4982]: E1205 19:15:54.389629 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:54 crc kubenswrapper[4982]: E1205 19:15:54.389685 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:55 crc kubenswrapper[4982]: I1205 19:15:55.390118 4982 scope.go:117] "RemoveContainer" containerID="2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb" Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.221249 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/1.log" Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.221682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerStarted","Data":"3524cbce9c0eb2e2fc04acf3c6d9153a434e42cf73e554bfa8a53608ce416b87"} Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.390329 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.390428 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.390537 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:56 crc kubenswrapper[4982]: I1205 19:15:56.390393 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:56 crc kubenswrapper[4982]: E1205 19:15:56.390601 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:15:56 crc kubenswrapper[4982]: E1205 19:15:56.391118 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:56 crc kubenswrapper[4982]: E1205 19:15:56.391011 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:56 crc kubenswrapper[4982]: E1205 19:15:56.391230 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:56 crc kubenswrapper[4982]: E1205 19:15:56.488690 4982 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 19:15:58 crc kubenswrapper[4982]: I1205 19:15:58.389273 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:15:58 crc kubenswrapper[4982]: I1205 19:15:58.389374 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:15:58 crc kubenswrapper[4982]: I1205 19:15:58.390413 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:15:58 crc kubenswrapper[4982]: E1205 19:15:58.390568 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:15:58 crc kubenswrapper[4982]: E1205 19:15:58.390688 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:15:58 crc kubenswrapper[4982]: I1205 19:15:58.390684 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:15:58 crc kubenswrapper[4982]: E1205 19:15:58.390869 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:15:58 crc kubenswrapper[4982]: E1205 19:15:58.391035 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:16:00 crc kubenswrapper[4982]: I1205 19:16:00.389678 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:00 crc kubenswrapper[4982]: I1205 19:16:00.389761 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:00 crc kubenswrapper[4982]: I1205 19:16:00.389890 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:16:00 crc kubenswrapper[4982]: E1205 19:16:00.389956 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 19:16:00 crc kubenswrapper[4982]: I1205 19:16:00.389996 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:16:00 crc kubenswrapper[4982]: E1205 19:16:00.390238 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6r5ns" podUID="4d68836a-462d-4364-bc12-b530a7cb0727" Dec 05 19:16:00 crc kubenswrapper[4982]: E1205 19:16:00.390375 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 19:16:00 crc kubenswrapper[4982]: E1205 19:16:00.390545 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.389274 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.389323 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.389354 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.389391 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.392851 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.393318 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.393647 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.393859 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.394128 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 19:16:02 crc kubenswrapper[4982]: I1205 19:16:02.394370 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.798615 4982 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.851050 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.851888 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.853592 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-59w99"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.854281 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.856292 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.857195 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.860914 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.861899 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.862883 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9z6x6"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.863944 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.878319 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8"] Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.879822 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4": failed to list *v1.Secret: secrets "machine-approver-sa-dockercfg-nl2j4" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.879875 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-sa-dockercfg-nl2j4\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-sa-dockercfg-nl2j4\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.880015 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.900671 4982 reflector.go:561] object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2": failed to list *v1.Secret: secrets "route-controller-manager-sa-dockercfg-h2zr2" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.900758 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"route-controller-manager-sa-dockercfg-h2zr2\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"route-controller-manager-sa-dockercfg-h2zr2\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found 
between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.900867 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.900890 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.900949 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.900966 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.901031 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-tls": failed to list *v1.Secret: secrets "machine-approver-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901050 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-approver-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.901078 4982 reflector.go:561] object-"openshift-authentication-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901094 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" 
in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.901124 4982 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj": failed to list *v1.Secret: secrets "authentication-operator-dockercfg-mz9bj" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901140 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-dockercfg-mz9bj\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"authentication-operator-dockercfg-mz9bj\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.900042 4982 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-config": failed to list *v1.ConfigMap: configmaps "authentication-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901201 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"authentication-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.901249 4982 reflector.go:561] object-"openshift-route-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901266 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.901287 4982 reflector.go:561] object-"openshift-authentication-operator"/"service-ca-bundle": failed to list *v1.ConfigMap: configmaps "service-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.901304 4982 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-authentication-operator\"/\"service-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"service-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.901838 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.902084 4982 reflector.go:561] object-"openshift-authentication-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.902237 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.902601 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.903003 4982 reflector.go:561] object-"openshift-authentication-operator"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.903109 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.903769 4982 reflector.go:561] object-"openshift-route-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.903886 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc 
kubenswrapper[4982]: I1205 19:16:10.903781 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.904086 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5"] Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.904176 4982 reflector.go:561] object-"openshift-authentication-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.904230 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.904300 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"machine-approver-config": failed to list *v1.ConfigMap: configmaps "machine-approver-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.904344 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"machine-approver-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-approver-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.904583 4982 reflector.go:561] object-"openshift-route-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.904604 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.903840 4982 reflector.go:561] object-"openshift-route-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.904638 4982 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-route-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.904991 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.906408 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 19:16:10 crc kubenswrapper[4982]: W1205 19:16:10.907237 4982 reflector.go:561] object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-cluster-machine-approver": no relationship found between node 'crc' and this object Dec 05 19:16:10 crc kubenswrapper[4982]: E1205 19:16:10.907276 4982 reflector.go:158] "Unhandled Error" err="object-\"openshift-cluster-machine-approver\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-cluster-machine-approver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.907367 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.907599 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.910679 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8n5n"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.911478 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.912314 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.912884 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.914083 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.914453 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.914655 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.914885 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.915064 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.916300 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.916573 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.916762 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.916954 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.917203 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.917502 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.919219 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.919260 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.919321 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.919848 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.920766 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.921116 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.921321 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.924554 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-p8gsl"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.924991 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.925129 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.931508 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.936958 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.937576 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.941352 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.941841 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943753 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xv69x\" (UniqueName: \"kubernetes.io/projected/54fb6e13-86a5-45f3-8640-28735d6db34f-kube-api-access-xv69x\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943782 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsr2n\" (UniqueName: \"kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943822 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943868 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943885 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-dir\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943899 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jt2pt\" (UniqueName: \"kubernetes.io/projected/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-kube-api-access-jt2pt\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943923 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943939 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943952 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.943984 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944000 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944014 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944029 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-serving-cert\") pod 
\"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944045 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-encryption-config\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944059 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944090 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk2bw\" (UniqueName: \"kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944105 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944119 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944135 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/403d803f-574a-47bc-a375-d06d37cb31d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944191 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vf2p\" (UniqueName: \"kubernetes.io/projected/403d803f-574a-47bc-a375-d06d37cb31d9-kube-api-access-7vf2p\") pod 
\"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8bhn\" (UniqueName: \"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944304 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944348 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/54fb6e13-86a5-45f3-8640-28735d6db34f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944396 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" 
(UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-client\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/403d803f-574a-47bc-a375-d06d37cb31d9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944480 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-policies\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x52p\" (UniqueName: \"kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944522 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54fb6e13-86a5-45f3-8640-28735d6db34f-serving-cert\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944541 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944562 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944609 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.944630 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.946285 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.946352 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.947350 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.947528 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.948328 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.948450 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.948690 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.948917 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.951939 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.952888 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.953194 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.956965 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.957312 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.957530 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.957632 4982 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.957838 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.958054 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.958401 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.958612 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.958785 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.958962 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.959377 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.960331 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.961502 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.961627 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.961745 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.962475 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.962794 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.962862 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.963324 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.962915 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.972611 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bh4fj"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.972652 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.982043 4982 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.982733 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.982825 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.982919 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.983960 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.984075 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.984174 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.984263 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.984342 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.985055 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.985332 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.985345 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.985656 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9qkfb"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.986329 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.986766 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.987083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.987324 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.988015 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.991331 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tbm7"] Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.991951 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.992289 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.994823 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.994869 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997067 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997390 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997531 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997459 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997664 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997795 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.997759 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.998133 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 19:16:10 crc kubenswrapper[4982]: I1205 19:16:10.999863 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.000256 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.000788 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.002618 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.003716 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.004247 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.004646 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.006066 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.006237 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.006554 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.006687 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.006874 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.007101 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.007340 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.007144 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.007685 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.011602 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.016283 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.017074 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-59w99"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.017282 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.017416 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.026249 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.026858 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.028110 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.029132 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.030729 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xtdn4"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.031826 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.033573 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-xzjmw"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.034643 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.040273 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.041095 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.041121 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.041347 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.042924 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.042966 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sp47z"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.044461 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.044605 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.044896 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.045941 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.045971 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.045993 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046013 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046037 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsr2n\" (UniqueName: \"kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046136 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6897af3a-9b39-4269-bcbd-e8a4496ae400-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046177 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca52d832-2bf9-49ca-a601-fc00d355efa3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xv69x\" (UniqueName: \"kubernetes.io/projected/54fb6e13-86a5-45f3-8640-28735d6db34f-kube-api-access-xv69x\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046282 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046615 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046894 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.046942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047025 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047207 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047271 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/8d4c7ce2-7724-494b-b86a-23627074ce45-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047343 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-serving-cert\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047510 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kv2w\" (UniqueName: \"kubernetes.io/projected/52af244c-8bc5-4cd4-8d87-937dcb4137c1-kube-api-access-4kv2w\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047569 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09020836-a0b8-4ad6-a270-91335b715695-metrics-tls\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047624 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047696 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-dir\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jt2pt\" (UniqueName: \"kubernetes.io/projected/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-kube-api-access-jt2pt\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.047999 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8jf6\" (UniqueName: \"kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048308 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048431 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-dir\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048496 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048326 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048857 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: 
\"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.048989 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049105 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09020836-a0b8-4ad6-a270-91335b715695-trusted-ca\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/848a0af4-3fad-4f70-92d1-d96883ded7bc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049774 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-images\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049880 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.049997 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" 
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050110 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050252 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-serving-cert\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050363 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-bound-sa-token\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050475 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8blz\" (UniqueName: \"kubernetes.io/projected/929478f4-2248-4064-8d89-d4a5a5ba5164-kube-api-access-l8blz\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-config\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050704 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-encryption-config\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050816 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-service-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.050947 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-serving-cert\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051056 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-trusted-ca\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv8lk\" (UniqueName: \"kubernetes.io/projected/848a0af4-3fad-4f70-92d1-d96883ded7bc-kube-api-access-kv8lk\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051285 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051647 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6897af3a-9b39-4269-bcbd-e8a4496ae400-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.051117 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052374 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052430 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052740 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sk2bw\" (UniqueName: \"kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052862 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052976 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.052970 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.053015 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.053509 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.053667 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.054004 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.054329 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-encryption-config\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.055170 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-serving-cert\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.060910 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065271 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vv2vn\" (UniqueName: \"kubernetes.io/projected/13eb0fde-8d53-4954-9cb1-5f1641cb0d14-kube-api-access-vv2vn\") pod \"migrator-59844c95c7-hd46v\" (UID: \"13eb0fde-8d53-4954-9cb1-5f1641cb0d14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065334 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/403d803f-574a-47bc-a375-d06d37cb31d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065409 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vf2p\" (UniqueName: \"kubernetes.io/projected/403d803f-574a-47bc-a375-d06d37cb31d9-kube-api-access-7vf2p\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065618 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065665 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8bhn\" (UniqueName: 
\"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065700 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl8bv\" (UniqueName: \"kubernetes.io/projected/8d4c7ce2-7724-494b-b86a-23627074ce45-kube-api-access-hl8bv\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.065734 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.066909 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/403d803f-574a-47bc-a375-d06d37cb31d9-config\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.067986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068033 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068067 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p2nq\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-kube-api-access-9p2nq\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca52d832-2bf9-49ca-a601-fc00d355efa3-config\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068131 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/929478f4-2248-4064-8d89-d4a5a5ba5164-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068172 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068198 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-client\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068229 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6897af3a-9b39-4269-bcbd-e8a4496ae400-config\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/54fb6e13-86a5-45f3-8640-28735d6db34f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068306 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5czxz\" (UniqueName: \"kubernetes.io/projected/09720fb9-ddb6-4a28-9187-5edbee74b5bd-kube-api-access-5czxz\") pod \"dns-operator-744455d44c-6tbm7\" (UID: \"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068337 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-config\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068359 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ca52d832-2bf9-49ca-a601-fc00d355efa3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068390 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-config\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068444 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-client\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068494 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068519 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929478f4-2248-4064-8d89-d4a5a5ba5164-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068542 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6zv8z\" (UniqueName: \"kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068601 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/403d803f-574a-47bc-a375-d06d37cb31d9-serving-cert\") pod 
\"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn564\" (UniqueName: \"kubernetes.io/projected/34ea9a7e-c915-4142-8718-df09afb6e362-kube-api-access-xn564\") pod \"downloads-7954f5f757-p8gsl\" (UID: \"34ea9a7e-c915-4142-8718-df09afb6e362\") " pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068663 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068687 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7ptj\" (UniqueName: \"kubernetes.io/projected/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-kube-api-access-s7ptj\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068714 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-policies\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068741 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlqgk\" (UniqueName: \"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-kube-api-access-xlqgk\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54fb6e13-86a5-45f3-8640-28735d6db34f-serving-cert\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068791 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068815 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09720fb9-ddb6-4a28-9187-5edbee74b5bd-metrics-tls\") pod \"dns-operator-744455d44c-6tbm7\" (UID: 
\"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.068845 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x52p\" (UniqueName: \"kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.069397 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.069491 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.069853 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8n5n"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.070486 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/54fb6e13-86a5-45f3-8640-28735d6db34f-available-featuregates\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.071131 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-audit-policies\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.071958 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.072165 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.072175 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.072800 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-etcd-client\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.072925 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.073903 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.074209 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/54fb6e13-86a5-45f3-8640-28735d6db34f-serving-cert\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.074689 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.075556 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/403d803f-574a-47bc-a375-d06d37cb31d9-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.076710 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.077638 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.078404 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.078667 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q84b6"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.079222 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.081165 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rj5rw"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.081909 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.082117 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.083632 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tbm7"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.086027 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.087342 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.088506 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-p8gsl"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.092322 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bh4fj"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.093675 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.093853 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9z6x6"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.094815 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.096061 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.097474 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sp47z"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.099171 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.100360 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.100741 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.101343 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.102464 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.102862 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.104527 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.105959 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.107180 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.107301 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.108523 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.109648 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9qkfb"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.111350 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.112681 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.114024 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.114206 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.115374 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.118184 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.119570 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.121003 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.122663 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.124065 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-tfh59"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.124641 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.125733 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-jpmmc"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.127961 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.128047 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.128375 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xtdn4"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.129354 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.130782 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.131817 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q84b6"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.132971 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rj5rw"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.134003 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.134134 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.138980 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tfh59"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.140768 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngqbv"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.143755 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-5kg8b"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.144127 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.146209 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5kg8b"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.146324 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.147033 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngqbv"] Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.154790 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169713 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169762 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8jf6\" (UniqueName: \"kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169798 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169818 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/848a0af4-3fad-4f70-92d1-d96883ded7bc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09020836-a0b8-4ad6-a270-91335b715695-trusted-ca\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169855 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-images\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169889 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-bound-sa-token\") pod 
\"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169903 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8blz\" (UniqueName: \"kubernetes.io/projected/929478f4-2248-4064-8d89-d4a5a5ba5164-kube-api-access-l8blz\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169921 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-config\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169937 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-service-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.169988 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-serving-cert\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-trusted-ca\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170018 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv8lk\" (UniqueName: \"kubernetes.io/projected/848a0af4-3fad-4f70-92d1-d96883ded7bc-kube-api-access-kv8lk\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170034 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6897af3a-9b39-4269-bcbd-e8a4496ae400-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170054 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170081 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vv2vn\" (UniqueName: \"kubernetes.io/projected/13eb0fde-8d53-4954-9cb1-5f1641cb0d14-kube-api-access-vv2vn\") pod \"migrator-59844c95c7-hd46v\" (UID: \"13eb0fde-8d53-4954-9cb1-5f1641cb0d14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170111 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl8bv\" (UniqueName: \"kubernetes.io/projected/8d4c7ce2-7724-494b-b86a-23627074ce45-kube-api-access-hl8bv\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170199 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p2nq\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-kube-api-access-9p2nq\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170221 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca52d832-2bf9-49ca-a601-fc00d355efa3-config\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170238 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929478f4-2248-4064-8d89-d4a5a5ba5164-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6897af3a-9b39-4269-bcbd-e8a4496ae400-config\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 
19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170282 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170329 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-client\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5czxz\" (UniqueName: \"kubernetes.io/projected/09720fb9-ddb6-4a28-9187-5edbee74b5bd-kube-api-access-5czxz\") pod \"dns-operator-744455d44c-6tbm7\" (UID: \"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170381 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-config\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170410 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca52d832-2bf9-49ca-a601-fc00d355efa3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170442 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-config\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170526 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929478f4-2248-4064-8d89-d4a5a5ba5164-serving-cert\") pod 
\"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170548 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6zv8z\" (UniqueName: \"kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170590 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7ptj\" (UniqueName: \"kubernetes.io/projected/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-kube-api-access-s7ptj\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn564\" (UniqueName: \"kubernetes.io/projected/34ea9a7e-c915-4142-8718-df09afb6e362-kube-api-access-xn564\") pod \"downloads-7954f5f757-p8gsl\" (UID: \"34ea9a7e-c915-4142-8718-df09afb6e362\") " pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170672 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlqgk\" (UniqueName: \"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-kube-api-access-xlqgk\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170707 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170729 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09720fb9-ddb6-4a28-9187-5edbee74b5bd-metrics-tls\") pod \"dns-operator-744455d44c-6tbm7\" (UID: \"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 
19:16:11.170764 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170803 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6897af3a-9b39-4269-bcbd-e8a4496ae400-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170827 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca52d832-2bf9-49ca-a601-fc00d355efa3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170857 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/8d4c7ce2-7724-494b-b86a-23627074ce45-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-serving-cert\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.170898 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kv2w\" (UniqueName: \"kubernetes.io/projected/52af244c-8bc5-4cd4-8d87-937dcb4137c1-kube-api-access-4kv2w\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171466 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-config\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171489 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-images\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171511 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171543 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09020836-a0b8-4ad6-a270-91335b715695-metrics-tls\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.171730 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09020836-a0b8-4ad6-a270-91335b715695-trusted-ca\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.172031 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.172288 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/929478f4-2248-4064-8d89-d4a5a5ba5164-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.172824 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d4c7ce2-7724-494b-b86a-23627074ce45-config\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.173004 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.173070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 
crc kubenswrapper[4982]: I1205 19:16:11.173251 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.173553 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-service-ca\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.173608 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.173624 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52af244c-8bc5-4cd4-8d87-937dcb4137c1-config\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.174379 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/848a0af4-3fad-4f70-92d1-d96883ded7bc-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.174723 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-serving-cert\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.175274 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-trusted-ca\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.175807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.175900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 
05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.176791 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-serving-cert\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.177315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09720fb9-ddb6-4a28-9187-5edbee74b5bd-metrics-tls\") pod \"dns-operator-744455d44c-6tbm7\" (UID: \"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.177388 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/52af244c-8bc5-4cd4-8d87-937dcb4137c1-etcd-client\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.177900 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.178576 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/8d4c7ce2-7724-494b-b86a-23627074ce45-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.179609 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.179653 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/929478f4-2248-4064-8d89-d4a5a5ba5164-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.179780 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.180162 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/09020836-a0b8-4ad6-a270-91335b715695-metrics-tls\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.180850 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.182548 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.194751 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.203055 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6897af3a-9b39-4269-bcbd-e8a4496ae400-config\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.214350 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.233912 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.245544 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6897af3a-9b39-4269-bcbd-e8a4496ae400-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.256338 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.274756 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.295071 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.305440 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ca52d832-2bf9-49ca-a601-fc00d355efa3-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.314439 4982 reflector.go:368] Caches populated for 
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.333975 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.354533 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.373792 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.383058 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ca52d832-2bf9-49ca-a601-fc00d355efa3-config\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.393422 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.434535 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.455003 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.474853 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.495065 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.515205 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.534881 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.554737 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.575248 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.594633 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.615250 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.634673 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.657239 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.675234 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.694626 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.714913 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.734556 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.755883 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.774286 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.794462 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.814615 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.835842 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.855193 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.875940 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.896916 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.915083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.934996 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.955841 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.975393 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 19:16:11 crc kubenswrapper[4982]: I1205 19:16:11.995336 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.015007 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.043625 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.046664 4982 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.046666 4982 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.046807 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.546776976 +0000 UTC m=+151.428663011 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync secret cache: timed out waiting for the condition
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.046961 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.546919559 +0000 UTC m=+151.428805654 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.047045 4982 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.047114 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.547093283 +0000 UTC m=+151.428979418 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition
Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050088 4982 secret.go:188] Couldn't get secret openshift-route-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050142 4982 secret.go:188] Couldn't get secret openshift-cluster-machine-approver/machine-approver-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050215 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.550192875 +0000 UTC m=+151.432078910 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050432 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls podName:c2c6637f-1ebe-4343-8d20-9aa14df3cc2f nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.550399089 +0000 UTC m=+151.432285124 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-approver-tls" (UniqueName: "kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls") pod "machine-approver-56656f9798-9vzmg" (UID: "c2c6637f-1ebe-4343-8d20-9aa14df3cc2f") : failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050483 4982 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/machine-approver-config: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.050577 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config podName:c2c6637f-1ebe-4343-8d20-9aa14df3cc2f nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.550551513 +0000 UTC m=+151.432437608 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config") pod "machine-approver-56656f9798-9vzmg" (UID: "c2c6637f-1ebe-4343-8d20-9aa14df3cc2f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.053357 4982 request.go:700] Waited for 1.008154605s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-apiserver/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.057448 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.067982 4982 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.068072 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config podName:c2c6637f-1ebe-4343-8d20-9aa14df3cc2f nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.568049916 +0000 UTC m=+151.449935991 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config") pod "machine-approver-56656f9798-9vzmg" (UID: "c2c6637f-1ebe-4343-8d20-9aa14df3cc2f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.067989 4982 configmap.go:193] Couldn't get configMap openshift-authentication-operator/service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.068130 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.568117267 +0000 UTC m=+151.450003302 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "service-ca-bundle" (UniqueName: "kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.070249 4982 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.070333 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.570311158 +0000 UTC m=+151.452197193 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.071460 4982 configmap.go:193] Couldn't get configMap openshift-authentication-operator/authentication-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.071604 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:12.571565366 +0000 UTC m=+151.453451431 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.075340 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.095423 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.115340 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.134243 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.175423 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.175710 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsr2n\" (UniqueName: \"kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n\") pod \"oauth-openshift-558db77b4-9z6x6\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.183180 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.195549 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.236342 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xv69x\" (UniqueName: \"kubernetes.io/projected/54fb6e13-86a5-45f3-8640-28735d6db34f-kube-api-access-xv69x\") pod \"openshift-config-operator-7777fb866f-rwjj5\" (UID: \"54fb6e13-86a5-45f3-8640-28735d6db34f\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.237120 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.274262 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.275732 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jt2pt\" (UniqueName: \"kubernetes.io/projected/b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c-kube-api-access-jt2pt\") pod \"apiserver-7bbb656c7d-4xlkq\" (UID: \"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.330458 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vf2p\" (UniqueName: \"kubernetes.io/projected/403d803f-574a-47bc-a375-d06d37cb31d9-kube-api-access-7vf2p\") pod \"openshift-apiserver-operator-796bbdcf4f-2x2f8\" (UID: \"403d803f-574a-47bc-a375-d06d37cb31d9\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.375501 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.388808 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:12 crc kubenswrapper[4982]: E1205 19:16:12.388970 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:18:14.388943748 +0000 UTC m=+273.270829753 (durationBeforeRetry 2m2s). 
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.395409 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.395975 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9z6x6"]
Dec 05 19:16:12 crc kubenswrapper[4982]: W1205 19:16:12.403600 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3ef803e_459d_4ade_abe6_e2201b265b09.slice/crio-04fa63ea39454b188e4126dfbf434222c154d0c4827601b35102f68bbbfda83e WatchSource:0}: Error finding container 04fa63ea39454b188e4126dfbf434222c154d0c4827601b35102f68bbbfda83e: Status 404 returned error can't find the container with id 04fa63ea39454b188e4126dfbf434222c154d0c4827601b35102f68bbbfda83e
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.413877 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.435681 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.443535 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.453931 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.475570 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.490535 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.490641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.490662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.490700 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.491864 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.494317 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.494549 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.494581 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.496652 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.496896 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.514596 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.516400 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.534708 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.554763 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.556707 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.556755 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.574502 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.591979 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592028 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592071 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592191 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592229 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592247 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592265 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.592295 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.594537 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.614802 4982 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.619103 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"] Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.623614 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.634750 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 19:16:12 crc kubenswrapper[4982]: W1205 19:16:12.635519 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9c8120d_5b91_4dd4_8d6c_dab96d4cb46c.slice/crio-36e7adc107d8c139e529e78e1379c63cdd14495e7e6d1057d81bc4addcab0ffd WatchSource:0}: Error finding container 36e7adc107d8c139e529e78e1379c63cdd14495e7e6d1057d81bc4addcab0ffd: Status 404 returned error can't find the container with id 36e7adc107d8c139e529e78e1379c63cdd14495e7e6d1057d81bc4addcab0ffd Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.635796 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.646036 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.662325 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.675763 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.691927 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8"] Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.695793 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.722688 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5"] Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.724782 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.741804 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.769447 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.778511 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.794610 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.816072 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.834738 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.854943 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.874698 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.894491 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.914208 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.934437 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.958654 4982 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.975229 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 19:16:12 crc kubenswrapper[4982]: I1205 19:16:12.994336 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.015759 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.035402 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.053522 4982 request.go:700] Waited for 1.883074692s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/serviceaccounts/openshift-controller-manager-sa/token Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.083425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8jf6\" (UniqueName: \"kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6\") pod \"controller-manager-879f6c89f-bzhst\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.104629 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv8lk\" (UniqueName: \"kubernetes.io/projected/848a0af4-3fad-4f70-92d1-d96883ded7bc-kube-api-access-kv8lk\") pod \"cluster-samples-operator-665b6dd947-nkzjs\" (UID: \"848a0af4-3fad-4f70-92d1-d96883ded7bc\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.123234 4982 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.126574 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p2nq\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-kube-api-access-9p2nq\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.137748 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/09020836-a0b8-4ad6-a270-91335b715695-bound-sa-token\") pod \"ingress-operator-5b745b69d9-2r5rq\" (UID: \"09020836-a0b8-4ad6-a270-91335b715695\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.153967 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.156018 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8blz\" (UniqueName: \"kubernetes.io/projected/929478f4-2248-4064-8d89-d4a5a5ba5164-kube-api-access-l8blz\") pod \"openshift-controller-manager-operator-756b6f6bc6-9k6ph\" (UID: \"929478f4-2248-4064-8d89-d4a5a5ba5164\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.173989 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6897af3a-9b39-4269-bcbd-e8a4496ae400-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-c4ssd\" (UID: \"6897af3a-9b39-4269-bcbd-e8a4496ae400\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.191249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5czxz\" (UniqueName: \"kubernetes.io/projected/09720fb9-ddb6-4a28-9187-5edbee74b5bd-kube-api-access-5czxz\") pod \"dns-operator-744455d44c-6tbm7\" (UID: \"09720fb9-ddb6-4a28-9187-5edbee74b5bd\") " pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.202722 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.212962 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6zv8z\" (UniqueName: \"kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z\") pod \"console-f9d7485db-k66n9\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.229993 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vv2vn\" (UniqueName: \"kubernetes.io/projected/13eb0fde-8d53-4954-9cb1-5f1641cb0d14-kube-api-access-vv2vn\") pod \"migrator-59844c95c7-hd46v\" (UID: \"13eb0fde-8d53-4954-9cb1-5f1641cb0d14\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.240565 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.249394 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.252710 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.253255 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.264477 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.293272 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" event={"ID":"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c","Type":"ContainerStarted","Data":"36e7adc107d8c139e529e78e1379c63cdd14495e7e6d1057d81bc4addcab0ffd"} Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.296360 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7ptj\" (UniqueName: \"kubernetes.io/projected/93cab3f2-1f4b-4716-a747-5dbf51b26b1d-kube-api-access-s7ptj\") pod \"console-operator-58897d9998-w8n5n\" (UID: \"93cab3f2-1f4b-4716-a747-5dbf51b26b1d\") " pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.296811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"d08cae2e17ec1b54b7abbe974cfde32b84bb92bb82bece29d68b9d9ba2a99a87"} Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.298875 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"b207cf0a9e085c63d57a34cb6f33330bd6d4cd49f1e3b5fc53a8bbcafd55edc6"} Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.303871 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" event={"ID":"54fb6e13-86a5-45f3-8640-28735d6db34f","Type":"ContainerStarted","Data":"2cdfd9705666ed755da05a8612906093133f8dee95cf51ee000640612ff494ce"} Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.305376 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" event={"ID":"a3ef803e-459d-4ade-abe6-e2201b265b09","Type":"ContainerStarted","Data":"04fa63ea39454b188e4126dfbf434222c154d0c4827601b35102f68bbbfda83e"} Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.306051 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn564\" (UniqueName: \"kubernetes.io/projected/34ea9a7e-c915-4142-8718-df09afb6e362-kube-api-access-xn564\") pod \"downloads-7954f5f757-p8gsl\" (UID: \"34ea9a7e-c915-4142-8718-df09afb6e362\") " pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.306900 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" event={"ID":"403d803f-574a-47bc-a375-d06d37cb31d9","Type":"ContainerStarted","Data":"2d79896fef83fd044c442530eab70fb63f869b3500e58b8c960c826365df406d"} Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.316805 4982 projected.go:288] Couldn't get configMap openshift-cluster-machine-approver/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.317602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlqgk\" (UniqueName: \"kubernetes.io/projected/cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d-kube-api-access-xlqgk\") pod \"cluster-image-registry-operator-dc59b4c8b-rk7nv\" (UID: \"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.337322 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ca52d832-2bf9-49ca-a601-fc00d355efa3-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-xkt4w\" (UID: \"ca52d832-2bf9-49ca-a601-fc00d355efa3\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.351003 4982 projected.go:288] Couldn't get configMap openshift-authentication-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.352241 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kv2w\" (UniqueName: \"kubernetes.io/projected/52af244c-8bc5-4cd4-8d87-937dcb4137c1-kube-api-access-4kv2w\") pod \"etcd-operator-b45778765-9qkfb\" (UID: \"52af244c-8bc5-4cd4-8d87-937dcb4137c1\") " pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.353451 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.371416 4982 projected.go:288] Couldn't get configMap openshift-route-controller-manager/openshift-service-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.371443 4982 projected.go:194] Error preparing data for projected volume kube-api-access-d8bhn for pod openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.371502 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:13.871483922 +0000 UTC m=+152.753369917 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-d8bhn" (UniqueName: "kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.382371 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl8bv\" (UniqueName: \"kubernetes.io/projected/8d4c7ce2-7724-494b-b86a-23627074ce45-kube-api-access-hl8bv\") pod \"machine-api-operator-5694c8668f-bh4fj\" (UID: \"8d4c7ce2-7724-494b-b86a-23627074ce45\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400340 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zphfp\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400402 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400452 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400587 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400648 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.400667 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.401024 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:13.901012172 +0000 UTC m=+152.782898167 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.418732 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.424702 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.434704 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-w8n5n" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.438286 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.441127 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.458092 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.461420 4982 projected.go:194] Error preparing data for projected volume kube-api-access-8x52p for pod openshift-authentication-operator/authentication-operator-69f744f599-59w99: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.461587 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. 
No retries permitted until 2025-12-05 19:16:13.961553375 +0000 UTC m=+152.843439370 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-8x52p" (UniqueName: "kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.476305 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.486594 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.495252 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.502679 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503008 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-cabundle\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503044 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9z8gr\" (UniqueName: \"kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22crz\" (UniqueName: \"kubernetes.io/projected/2674c1ed-7389-410e-8720-82e3e9086952-kube-api-access-22crz\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503124 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbfdg\" (UniqueName: \"kubernetes.io/projected/416dafe6-da57-4f4c-b550-4ef07e293c90-kube-api-access-mbfdg\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503216 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-key\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2mtq\" (UniqueName: \"kubernetes.io/projected/87744d6f-b352-49ee-8978-c50c5ec247b4-kube-api-access-p2mtq\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503280 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbc4424a-a893-4cab-a2ed-d29155d30633-config\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503297 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-config\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503321 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503415 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-serving-cert\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503435 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-srv-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.503471 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.003447259 +0000 UTC m=+152.885333254 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503536 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-profile-collector-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503565 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-client\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503652 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czjjv\" (UniqueName: \"kubernetes.io/projected/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-kube-api-access-czjjv\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.503985 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-images\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.504011 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kq7th\" (UniqueName: \"kubernetes.io/projected/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-kube-api-access-kq7th\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505019 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/888914ad-2e3b-4013-886a-3e4eaf653ab7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505139 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics\") pod 
\"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-metrics-tls\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505354 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-node-pullsecrets\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505377 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4k9d\" (UniqueName: \"kubernetes.io/projected/62fa1e5c-e287-40d9-b98e-2bd536b193ed-kube-api-access-k4k9d\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505661 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505725 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.505993 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506270 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjhng\" (UniqueName: \"kubernetes.io/projected/9c78cdb7-3299-4b32-b043-77323397f4e7-kube-api-access-zjhng\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506317 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n2sr\" (UniqueName: \"kubernetes.io/projected/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-kube-api-access-8n2sr\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506335 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc4424a-a893-4cab-a2ed-d29155d30633-serving-cert\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506355 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-encryption-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506620 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfgsf\" (UniqueName: \"kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.506700 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.006684433 +0000 UTC m=+152.888570648 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.506985 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.507023 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-registration-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.507274 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.507480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zphfp\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.507764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-serving-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.507888 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-certs\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.508339 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-default-certificate\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.508387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/888914ad-2e3b-4013-886a-3e4eaf653ab7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.508403 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-audit-dir\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.508419 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87744d6f-b352-49ee-8978-c50c5ec247b4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.508904 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-node-bootstrap-token\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.509768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4487ea8f-6e7c-4963-8b8a-495beef6b634-cert\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.509799 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/888914ad-2e3b-4013-886a-3e4eaf653ab7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-service-ca-bundle\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510213 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-socket-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " 
pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510348 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-metrics-certs\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510541 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510571 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7c7z\" (UniqueName: \"kubernetes.io/projected/dbc4424a-a893-4cab-a2ed-d29155d30633-kube-api-access-q7c7z\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510596 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2674c1ed-7389-410e-8720-82e3e9086952-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-stats-auth\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510729 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/416dafe6-da57-4f4c-b550-4ef07e293c90-proxy-tls\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-webhook-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.510983 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9c78cdb7-3299-4b32-b043-77323397f4e7-tmpfs\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcb22\" (UniqueName: \"kubernetes.io/projected/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-kube-api-access-kcb22\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511748 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511807 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-csi-data-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511904 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.511960 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzxv6\" (UniqueName: \"kubernetes.io/projected/115ca87a-0d80-431b-a1c9-9a013d387a73-kube-api-access-jzxv6\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.512940 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-plugins-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc 
kubenswrapper[4982]: I1205 19:16:13.513096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72f8774d-c2a9-4489-9812-2b72525fe9d9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.513159 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.513186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-mountpoint-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.514697 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-image-import-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.514834 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fbbt\" (UniqueName: \"kubernetes.io/projected/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-kube-api-access-2fbbt\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.515578 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.515782 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.516077 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.516170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-srv-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: 
\"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.516218 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plwgh\" (UniqueName: \"kubernetes.io/projected/3027be5b-ae8f-442c-a924-0dc7434e6b1c-kube-api-access-plwgh\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.522961 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-df4d6\" (UniqueName: \"kubernetes.io/projected/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-kube-api-access-df4d6\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4w86\" (UniqueName: \"kubernetes.io/projected/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-kube-api-access-b4w86\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523792 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7tpd\" (UniqueName: \"kubernetes.io/projected/4487ea8f-6e7c-4963-8b8a-495beef6b634-kube-api-access-w7tpd\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523831 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8sl7\" (UniqueName: \"kubernetes.io/projected/72f8774d-c2a9-4489-9812-2b72525fe9d9-kube-api-access-h8sl7\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523867 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2674c1ed-7389-410e-8720-82e3e9086952-proxy-tls\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523950 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.523991 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-audit\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " 
pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.524052 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-config-volume\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.524088 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87744d6f-b352-49ee-8978-c50c5ec247b4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.524121 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/62fa1e5c-e287-40d9-b98e-2bd536b193ed-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.524477 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.527554 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.528858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.540809 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.541342 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.555260 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.557357 4982 projected.go:194] Error preparing data for projected volume kube-api-access-sk2bw for pod openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.557456 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw podName:c2c6637f-1ebe-4343-8d20-9aa14df3cc2f nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.057430551 +0000 UTC m=+152.939316546 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-sk2bw" (UniqueName: "kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw") pod "machine-approver-56656f9798-9vzmg" (UID: "c2c6637f-1ebe-4343-8d20-9aa14df3cc2f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.559688 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.574211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.576603 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.584499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-service-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.589139 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-machine-approver-tls\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593070 4982 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593162 4982 configmap.go:193] Couldn't get configMap openshift-cluster-machine-approver/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593211 4982 configmap.go:193] Couldn't get configMap 
openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593096 4982 secret.go:188] Couldn't get secret openshift-route-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593113 4982 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593227 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config podName:c2c6637f-1ebe-4343-8d20-9aa14df3cc2f nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.593201123 +0000 UTC m=+153.475087118 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "auth-proxy-config" (UniqueName: "kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config") pod "machine-approver-56656f9798-9vzmg" (UID: "c2c6637f-1ebe-4343-8d20-9aa14df3cc2f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593335 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.593328476 +0000 UTC m=+153.475214471 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593353 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.593345337 +0000 UTC m=+153.475231332 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync configmap cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593370 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert podName:4f105472-b420-4bb3-877d-663d96eed1af nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.593362697 +0000 UTC m=+153.475248692 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert") pod "route-controller-manager-6576b87f9c-qsnxp" (UID: "4f105472-b420-4bb3-877d-663d96eed1af") : failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.593384 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert podName:b02dee03-310b-4a43-b1ad-de5efd1031a3 nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.593377888 +0000 UTC m=+153.475263883 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert") pod "authentication-operator-69f744f599-59w99" (UID: "b02dee03-310b-4a43-b1ad-de5efd1031a3") : failed to sync secret cache: timed out waiting for the condition Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.596455 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.620065 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625047 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.625173 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.125126898 +0000 UTC m=+153.007012883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/888914ad-2e3b-4013-886a-3e4eaf653ab7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-service-ca-bundle\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625487 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-socket-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-metrics-certs\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625538 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7c7z\" (UniqueName: \"kubernetes.io/projected/dbc4424a-a893-4cab-a2ed-d29155d30633-kube-api-access-q7c7z\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2674c1ed-7389-410e-8720-82e3e9086952-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625577 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625595 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-stats-auth\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/416dafe6-da57-4f4c-b550-4ef07e293c90-proxy-tls\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625640 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-webhook-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625692 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9c78cdb7-3299-4b32-b043-77323397f4e7-tmpfs\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcb22\" (UniqueName: \"kubernetes.io/projected/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-kube-api-access-kcb22\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625735 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625753 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-csi-data-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vb9rv\" 
(UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625788 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzxv6\" (UniqueName: \"kubernetes.io/projected/115ca87a-0d80-431b-a1c9-9a013d387a73-kube-api-access-jzxv6\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625804 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-plugins-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72f8774d-c2a9-4489-9812-2b72525fe9d9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625856 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625872 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-mountpoint-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625896 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-image-import-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625916 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fbbt\" (UniqueName: \"kubernetes.io/projected/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-kube-api-access-2fbbt\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.625980 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-srv-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626000 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plwgh\" (UniqueName: \"kubernetes.io/projected/3027be5b-ae8f-442c-a924-0dc7434e6b1c-kube-api-access-plwgh\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-df4d6\" (UniqueName: \"kubernetes.io/projected/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-kube-api-access-df4d6\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626040 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8sl7\" (UniqueName: \"kubernetes.io/projected/72f8774d-c2a9-4489-9812-2b72525fe9d9-kube-api-access-h8sl7\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626068 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2674c1ed-7389-410e-8720-82e3e9086952-proxy-tls\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626092 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4w86\" (UniqueName: \"kubernetes.io/projected/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-kube-api-access-b4w86\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626115 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7tpd\" (UniqueName: \"kubernetes.io/projected/4487ea8f-6e7c-4963-8b8a-495beef6b634-kube-api-access-w7tpd\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-audit\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-config-volume\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626229 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87744d6f-b352-49ee-8978-c50c5ec247b4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: 
\"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626250 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/62fa1e5c-e287-40d9-b98e-2bd536b193ed-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626276 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-cabundle\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9z8gr\" (UniqueName: \"kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626326 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22crz\" (UniqueName: \"kubernetes.io/projected/2674c1ed-7389-410e-8720-82e3e9086952-kube-api-access-22crz\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbfdg\" (UniqueName: \"kubernetes.io/projected/416dafe6-da57-4f4c-b550-4ef07e293c90-kube-api-access-mbfdg\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-key\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2mtq\" (UniqueName: \"kubernetes.io/projected/87744d6f-b352-49ee-8978-c50c5ec247b4-kube-api-access-p2mtq\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626410 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbc4424a-a893-4cab-a2ed-d29155d30633-config\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626437 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-serving-cert\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626492 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-srv-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-profile-collector-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626529 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-client\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626550 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czjjv\" (UniqueName: \"kubernetes.io/projected/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-kube-api-access-czjjv\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626567 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-images\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626587 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kq7th\" (UniqueName: \"kubernetes.io/projected/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-kube-api-access-kq7th\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/888914ad-2e3b-4013-886a-3e4eaf653ab7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626625 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-metrics-tls\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626668 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-node-pullsecrets\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626687 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4k9d\" (UniqueName: \"kubernetes.io/projected/62fa1e5c-e287-40d9-b98e-2bd536b193ed-kube-api-access-k4k9d\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626736 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626754 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626771 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n2sr\" (UniqueName: \"kubernetes.io/projected/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-kube-api-access-8n2sr\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626789 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjhng\" (UniqueName: \"kubernetes.io/projected/9c78cdb7-3299-4b32-b043-77323397f4e7-kube-api-access-zjhng\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: 
\"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc4424a-a893-4cab-a2ed-d29155d30633-serving-cert\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626822 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626837 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-encryption-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626857 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfgsf\" (UniqueName: \"kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-registration-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626893 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626914 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-serving-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626934 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-certs\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-default-certificate\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626970 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/888914ad-2e3b-4013-886a-3e4eaf653ab7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626988 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-audit-dir\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627005 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87744d6f-b352-49ee-8978-c50c5ec247b4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627025 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-node-bootstrap-token\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4487ea8f-6e7c-4963-8b8a-495beef6b634-cert\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627042 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627252 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.627476 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-socket-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc 
kubenswrapper[4982]: I1205 19:16:13.627895 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2674c1ed-7389-410e-8720-82e3e9086952-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.628951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-trusted-ca-bundle\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.629118 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-cabundle\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.631173 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-apiservice-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.631472 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-audit\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.631588 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-audit-dir\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.632009 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbc4424a-a893-4cab-a2ed-d29155d30633-config\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.626410 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-service-ca-bundle\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.632363 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-config-volume\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc 
kubenswrapper[4982]: I1205 19:16:13.632375 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/87744d6f-b352-49ee-8978-c50c5ec247b4-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.632889 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-serving-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633258 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-metrics-certs\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633465 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbc4424a-a893-4cab-a2ed-d29155d30633-serving-cert\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633517 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-plugins-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633545 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/888914ad-2e3b-4013-886a-3e4eaf653ab7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633776 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-certs\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.633968 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-registration-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.634132 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-csi-data-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " 
pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.636161 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.636605 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/416dafe6-da57-4f4c-b550-4ef07e293c90-images\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.637006 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.637230 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/115ca87a-0d80-431b-a1c9-9a013d387a73-node-pullsecrets\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.637757 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.137739898 +0000 UTC m=+153.019625883 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.638505 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.638550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/9c78cdb7-3299-4b32-b043-77323397f4e7-tmpfs\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.639205 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.639550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/888914ad-2e3b-4013-886a-3e4eaf653ab7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.639602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-mountpoint-dir\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.641187 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.641279 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/62fa1e5c-e287-40d9-b98e-2bd536b193ed-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.641532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: 
\"kubernetes.io/configmap/115ca87a-0d80-431b-a1c9-9a013d387a73-image-import-ca\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.646504 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-etcd-client\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.647568 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72f8774d-c2a9-4489-9812-2b72525fe9d9-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.649497 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c78cdb7-3299-4b32-b043-77323397f4e7-webhook-cert\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.649855 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/87744d6f-b352-49ee-8978-c50c5ec247b4-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.650357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-metrics-tls\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.650622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/416dafe6-da57-4f4c-b550-4ef07e293c90-proxy-tls\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-encryption-config\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651279 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115ca87a-0d80-431b-a1c9-9a013d387a73-serving-cert\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651390 
4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-default-certificate\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-node-bootstrap-token\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651866 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-srv-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.651951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/3027be5b-ae8f-442c-a924-0dc7434e6b1c-profile-collector-cert\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.652191 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-stats-auth\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.652619 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/4487ea8f-6e7c-4963-8b8a-495beef6b634-cert\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.653377 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-signing-key\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.654131 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.654674 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-srv-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.654704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume\") pod 
\"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.657615 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2674c1ed-7389-410e-8720-82e3e9086952-proxy-tls\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.658423 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-profile-collector-cert\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.677573 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.695076 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.717034 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.729558 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.730176 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.230138674 +0000 UTC m=+153.112024669 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.739003 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.763217 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.814301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zphfp\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.840111 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.841000 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.340981004 +0000 UTC m=+153.222866999 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.845429 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd"] Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.850304 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.850377 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq"] Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.850858 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v"] Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.854234 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.883114 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7c7z\" (UniqueName: \"kubernetes.io/projected/dbc4424a-a893-4cab-a2ed-d29155d30633-kube-api-access-q7c7z\") pod \"service-ca-operator-777779d784-q84b6\" (UID: \"dbc4424a-a893-4cab-a2ed-d29155d30633\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.883143 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/888914ad-2e3b-4013-886a-3e4eaf653ab7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-p9xfl\" (UID: \"888914ad-2e3b-4013-886a-3e4eaf653ab7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.897764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4w86\" (UniqueName: \"kubernetes.io/projected/47f30033-f3ed-45d5-ad06-4ed3ac9a4db6-kube-api-access-b4w86\") pod \"service-ca-9c57cc56f-rj5rw\" (UID: \"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6\") " pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.911564 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7tpd\" (UniqueName: \"kubernetes.io/projected/4487ea8f-6e7c-4963-8b8a-495beef6b634-kube-api-access-w7tpd\") pod \"ingress-canary-tfh59\" (UID: \"4487ea8f-6e7c-4963-8b8a-495beef6b634\") " pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.941275 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.941497 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.441456536 +0000 UTC m=+153.323342531 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.941620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.941741 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8bhn\" (UniqueName: \"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:13 crc kubenswrapper[4982]: E1205 19:16:13.942162 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.442131171 +0000 UTC m=+153.324017166 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.951530 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9z8gr\" (UniqueName: \"kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr\") pod \"marketplace-operator-79b997595-wdf6s\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.969577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8bhn\" (UniqueName: \"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.972628 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfgsf\" (UniqueName: \"kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf\") pod \"collect-profiles-29416035-vz5vw\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.972852 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.978079 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22crz\" (UniqueName: \"kubernetes.io/projected/2674c1ed-7389-410e-8720-82e3e9086952-kube-api-access-22crz\") pod \"machine-config-controller-84d6567774-n5kkt\" (UID: \"2674c1ed-7389-410e-8720-82e3e9086952\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.984011 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" Dec 05 19:16:13 crc kubenswrapper[4982]: I1205 19:16:13.991974 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2mtq\" (UniqueName: \"kubernetes.io/projected/87744d6f-b352-49ee-8978-c50c5ec247b4-kube-api-access-p2mtq\") pod \"kube-storage-version-migrator-operator-b67b599dd-l248k\" (UID: \"87744d6f-b352-49ee-8978-c50c5ec247b4\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.002707 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.010714 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbfdg\" (UniqueName: \"kubernetes.io/projected/416dafe6-da57-4f4c-b550-4ef07e293c90-kube-api-access-mbfdg\") pod \"machine-config-operator-74547568cd-vb9rv\" (UID: \"416dafe6-da57-4f4c-b550-4ef07e293c90\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.020230 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.027412 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-tfh59" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.038824 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fbbt\" (UniqueName: \"kubernetes.io/projected/a75c41a7-8e00-45d8-b5a7-5a19d4aa3114-kube-api-access-2fbbt\") pod \"router-default-5444994796-xzjmw\" (UID: \"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114\") " pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.043875 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.044569 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x52p\" (UniqueName: \"kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.045114 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.54508792 +0000 UTC m=+153.426973965 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.048357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x52p\" (UniqueName: \"kubernetes.io/projected/b02dee03-310b-4a43-b1ad-de5efd1031a3-kube-api-access-8x52p\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.067414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8sl7\" (UniqueName: \"kubernetes.io/projected/72f8774d-c2a9-4489-9812-2b72525fe9d9-kube-api-access-h8sl7\") pod \"control-plane-machine-set-operator-78cbb6b69f-xctm7\" (UID: \"72f8774d-c2a9-4489-9812-2b72525fe9d9\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.083237 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-df4d6\" (UniqueName: \"kubernetes.io/projected/ee8c296e-5e20-47d2-b161-d5c610f7b6a9-kube-api-access-df4d6\") pod \"csi-hostpathplugin-ngqbv\" (UID: \"ee8c296e-5e20-47d2-b161-d5c610f7b6a9\") " pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.102719 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plwgh\" (UniqueName: \"kubernetes.io/projected/3027be5b-ae8f-442c-a924-0dc7434e6b1c-kube-api-access-plwgh\") pod \"catalog-operator-68c6474976-m7scf\" (UID: \"3027be5b-ae8f-442c-a924-0dc7434e6b1c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.109034 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.122533 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-p8gsl"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.123001 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjhng\" (UniqueName: \"kubernetes.io/projected/9c78cdb7-3299-4b32-b043-77323397f4e7-kube-api-access-zjhng\") pod \"packageserver-d55dfcdfc-tj24q\" (UID: \"9c78cdb7-3299-4b32-b043-77323397f4e7\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.123491 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.145300 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.145955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-sk2bw\" (UniqueName: \"kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.146113 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.146462 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.646450702 +0000 UTC m=+153.528336697 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.148213 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6tbm7"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.160368 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk2bw\" (UniqueName: \"kubernetes.io/projected/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-kube-api-access-sk2bw\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.174381 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.176537 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4k9d\" (UniqueName: \"kubernetes.io/projected/62fa1e5c-e287-40d9-b98e-2bd536b193ed-kube-api-access-k4k9d\") pod \"multus-admission-controller-857f4d67dd-xtdn4\" (UID: \"62fa1e5c-e287-40d9-b98e-2bd536b193ed\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.176966 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzxv6\" (UniqueName: \"kubernetes.io/projected/115ca87a-0d80-431b-a1c9-9a013d387a73-kube-api-access-jzxv6\") pod \"apiserver-76f77b778f-sp47z\" (UID: \"115ca87a-0d80-431b-a1c9-9a013d387a73\") " pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.187473 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n2sr\" (UniqueName: \"kubernetes.io/projected/25b3bfdc-d13e-4dfb-8a21-59aaa07c3871-kube-api-access-8n2sr\") pod \"olm-operator-6b444d44fb-jdcq7\" (UID: \"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.198685 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.206302 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.212576 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.214763 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kq7th\" (UniqueName: \"kubernetes.io/projected/5f38bb1c-e651-4dbb-8ca6-f2245e296df9-kube-api-access-kq7th\") pod \"machine-config-server-jpmmc\" (UID: \"5f38bb1c-e651-4dbb-8ca6-f2245e296df9\") " pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.214769 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czjjv\" (UniqueName: \"kubernetes.io/projected/7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922-kube-api-access-czjjv\") pod \"dns-default-5kg8b\" (UID: \"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922\") " pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.218652 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.239935 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.240774 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.241958 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.243274 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcb22\" (UniqueName: \"kubernetes.io/projected/26c02f43-04d0-49fd-a8c0-5e6856cef9f2-kube-api-access-kcb22\") pod \"package-server-manager-789f6589d5-rj56h\" (UID: \"26c02f43-04d0-49fd-a8c0-5e6856cef9f2\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:14 crc kubenswrapper[4982]: W1205 19:16:14.245707 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34ea9a7e_c915_4142_8718_df09afb6e362.slice/crio-cb8989807534c2b28b369df5ff8e42affbb1476d9c0a794c30342f2f84d05109 WatchSource:0}: Error finding container cb8989807534c2b28b369df5ff8e42affbb1476d9c0a794c30342f2f84d05109: Status 404 returned error can't find the container with id cb8989807534c2b28b369df5ff8e42affbb1476d9c0a794c30342f2f84d05109 Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.247293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.247885 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.747872775 +0000 UTC m=+153.629758770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.251522 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.265254 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.297023 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.311235 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.334763 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-jpmmc" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.357288 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.357607 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.85759585 +0000 UTC m=+153.739481845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.358081 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.358460 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.395424 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" event={"ID":"09020836-a0b8-4ad6-a270-91335b715695","Type":"ContainerStarted","Data":"6d17680c31a500945eb7c3ffaf42bf48c9783032734dffe85f53aae959707b3a"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.397340 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-w8n5n"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.398187 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" event={"ID":"13eb0fde-8d53-4954-9cb1-5f1641cb0d14","Type":"ContainerStarted","Data":"74dd5e16a8392c9d2bda9b06f870fd4b3103a5eff69ce264740a2f62c84fb94a"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.402133 4982 generic.go:334] "Generic (PLEG): container finished" podID="54fb6e13-86a5-45f3-8640-28735d6db34f" containerID="49864f438f8f06d2985c9ece5b49c3f1e9b0fc03ca62bf441d51612aca1ac823" exitCode=0 Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.402236 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" event={"ID":"54fb6e13-86a5-45f3-8640-28735d6db34f","Type":"ContainerDied","Data":"49864f438f8f06d2985c9ece5b49c3f1e9b0fc03ca62bf441d51612aca1ac823"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.407354 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.409139 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c" 
containerID="9e2f6c96f330c4b0353a490abdf63ae72157b1c75d88673764c66afde37a38d6" exitCode=0 Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.409229 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" event={"ID":"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c","Type":"ContainerDied","Data":"9e2f6c96f330c4b0353a490abdf63ae72157b1c75d88673764c66afde37a38d6"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.419514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" event={"ID":"d832b229-50b0-4f09-a892-eb36e39004fb","Type":"ContainerStarted","Data":"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.420088 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" event={"ID":"d832b229-50b0-4f09-a892-eb36e39004fb","Type":"ContainerStarted","Data":"841f1e3b295b64f1c5deae6418b871b92026ccf6e622045674d30be414e2b7f4"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.420557 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.421633 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" event={"ID":"ca52d832-2bf9-49ca-a601-fc00d355efa3","Type":"ContainerStarted","Data":"d18e34678ffc921e8f6b8d0523732b51a2ac2a4aa41e8c954bf11db8f74807e7"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.422225 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" event={"ID":"09720fb9-ddb6-4a28-9187-5edbee74b5bd","Type":"ContainerStarted","Data":"9eb04a4be0db30fcece09bd025f4cf40ccce5b79ba18f75a5265479d48df97ca"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.422881 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" event={"ID":"929478f4-2248-4064-8d89-d4a5a5ba5164","Type":"ContainerStarted","Data":"38d66cc632c6524c4ae28afabb27707c56c068f1406d7ca6e0b8218ae1295762"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.423294 4982 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-bzhst container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.423323 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.21:8443/healthz\": dial tcp 10.217.0.21:8443: connect: connection refused" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.423896 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" event={"ID":"a3ef803e-459d-4ade-abe6-e2201b265b09","Type":"ContainerStarted","Data":"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.424663 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.448256 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" event={"ID":"403d803f-574a-47bc-a375-d06d37cb31d9","Type":"ContainerStarted","Data":"7e1ba45d6d049a3d171abd719169654fca790e264459867618203a903e04e74c"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.450105 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.452604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" event={"ID":"6897af3a-9b39-4269-bcbd-e8a4496ae400","Type":"ContainerStarted","Data":"913ecda8e08a0979a483e05c2065d026367fcb9f2f90f52ddfbded472f3bae13"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.453664 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"17df9454de80587b94cd9cfe4b53ecfb658c66a9ac611a1011fbbbcba62ba4c0"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.453699 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"634d03be53cf33d5e0704af2fe6fdb0bae423359d6f6012ab29f7281181725a7"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.458001 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.459352 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:14.959332551 +0000 UTC m=+153.841218576 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.463516 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"84ae199306176a0e22b82785f98aa15d8df62a6d6ef7a03cceb8e5aaee55e1e2"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.476979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-p8gsl" event={"ID":"34ea9a7e-c915-4142-8718-df09afb6e362","Type":"ContainerStarted","Data":"cb8989807534c2b28b369df5ff8e42affbb1476d9c0a794c30342f2f84d05109"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.487204 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3c3c9e370c4a0309e10f8244390475878980846da81db83d5d243db115ff1355"} Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.490071 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:14 crc kubenswrapper[4982]: W1205 19:16:14.490520 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93cab3f2_1f4b_4716_a747_5dbf51b26b1d.slice/crio-e174d76ff4b99e880429544a8e5d0138d039a183e9a0b3b5a6a1f18e158a3321 WatchSource:0}: Error finding container e174d76ff4b99e880429544a8e5d0138d039a183e9a0b3b5a6a1f18e158a3321: Status 404 returned error can't find the container with id e174d76ff4b99e880429544a8e5d0138d039a183e9a0b3b5a6a1f18e158a3321 Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.500821 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-9qkfb"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.510229 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.532912 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bh4fj"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.560398 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.560792 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 19:16:15.060774544 +0000 UTC m=+153.942660579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.659685 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-q84b6"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661758 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661844 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.661890 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.662688 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.662794 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.162776251 +0000 UTC m=+154.044662296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.668432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c2c6637f-1ebe-4343-8d20-9aa14df3cc2f-auth-proxy-config\") pod \"machine-approver-56656f9798-9vzmg\" (UID: \"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.668840 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b02dee03-310b-4a43-b1ad-de5efd1031a3-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: W1205 19:16:14.668983 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8d4c7ce2_7724_494b_b86a_23627074ce45.slice/crio-e87a106618050a870befaf41f638371ec21990d52ffc797b8e5737eb834eafd5 WatchSource:0}: Error finding container e87a106618050a870befaf41f638371ec21990d52ffc797b8e5737eb834eafd5: Status 404 returned error can't find the container with id e87a106618050a870befaf41f638371ec21990d52ffc797b8e5737eb834eafd5 Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.681464 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b02dee03-310b-4a43-b1ad-de5efd1031a3-serving-cert\") pod \"authentication-operator-69f744f599-59w99\" (UID: \"b02dee03-310b-4a43-b1ad-de5efd1031a3\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.687437 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"route-controller-manager-6576b87f9c-qsnxp\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:14 crc kubenswrapper[4982]: W1205 19:16:14.702096 4982 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52af244c_8bc5_4cd4_8d87_937dcb4137c1.slice/crio-c00d6b7608e21edfa8c42ae393a5a4945876b4d1058aebc3bf00780d4c98722d WatchSource:0}: Error finding container c00d6b7608e21edfa8c42ae393a5a4945876b4d1058aebc3bf00780d4c98722d: Status 404 returned error can't find the container with id c00d6b7608e21edfa8c42ae393a5a4945876b4d1058aebc3bf00780d4c98722d Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.772212 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.772834 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.272818383 +0000 UTC m=+154.154704378 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.807142 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.809734 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-rj5rw"] Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.819570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.829428 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.873698 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.874091 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.374076343 +0000 UTC m=+154.255962338 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.935990 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-2x2f8" podStartSLOduration=125.935967686 podStartE2EDuration="2m5.935967686s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:14.934426701 +0000 UTC m=+153.816312696" watchObservedRunningTime="2025-12-05 19:16:14.935967686 +0000 UTC m=+153.817853681" Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.975695 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-tfh59"] Dec 05 19:16:14 crc kubenswrapper[4982]: E1205 19:16:14.976522 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.476507789 +0000 UTC m=+154.358393784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:14 crc kubenswrapper[4982]: I1205 19:16:14.975714 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:14.999686 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.034624 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"] Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.078140 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.078486 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 19:16:15.578471475 +0000 UTC m=+154.460357470 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.083966 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl"] Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.098029 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k"] Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.110225 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf"] Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.180708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.181056 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.681037035 +0000 UTC m=+154.562923030 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.281954 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.282843 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.782825637 +0000 UTC m=+154.664711632 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.329992 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" podStartSLOduration=126.329972411 podStartE2EDuration="2m6.329972411s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:15.291585088 +0000 UTC m=+154.173471093" watchObservedRunningTime="2025-12-05 19:16:15.329972411 +0000 UTC m=+154.211858406" Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.383760 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.384060 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.884048875 +0000 UTC m=+154.765934870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.485960 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.486864 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:15.98683864 +0000 UTC m=+154.868724635 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.588778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.590415 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.090392813 +0000 UTC m=+154.972278808 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.601318 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jpmmc" event={"ID":"5f38bb1c-e651-4dbb-8ca6-f2245e296df9","Type":"ContainerStarted","Data":"03ca2a7214c18976570b9ecd5fe826230e92678605b8145836296d1565e71820"} Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.644960 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tfh59" event={"ID":"4487ea8f-6e7c-4963-8b8a-495beef6b634","Type":"ContainerStarted","Data":"ffae2440f156b126e0ea4cd28e4a922dd070868b2d44302cee396fecf615a030"} Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.647965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" event={"ID":"c2bde128-2402-4be6-bbe8-ef3518e58045","Type":"ContainerStarted","Data":"0dd88c84ab40ba0cc221bb460a99a6a5e1f2c2cf63aeda5dee61d7f4a8f0d08b"} Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.656444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-p8gsl" event={"ID":"34ea9a7e-c915-4142-8718-df09afb6e362","Type":"ContainerStarted","Data":"591c77422ec3d48853a08c04e9d404000b3689844bcfe27a6b89586738a9528d"} Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.659595 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.669807 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-p8gsl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" 
start-of-body=
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.669874 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-p8gsl" podUID="34ea9a7e-c915-4142-8718-df09afb6e362" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.670818 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" event={"ID":"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d","Type":"ContainerStarted","Data":"1e8576d99a512a0a41a461bb9cca2516bceab7a08d5e172dfc7fbc48f0050302"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.686001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xzjmw" event={"ID":"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114","Type":"ContainerStarted","Data":"b0f921d478d8520c7770b55313491a45fcf3c1d4872e3d57b5379b03cfc2177f"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.690571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.690751 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.190721191 +0000 UTC m=+155.072607186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.690833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.692103 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.192093143 +0000 UTC m=+155.073979138 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.700947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" event={"ID":"6897af3a-9b39-4269-bcbd-e8a4496ae400","Type":"ContainerStarted","Data":"1252727d21c37cc38833a3ac4280c12145536998b4cb93324d56ae213098671f"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.704128 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" event={"ID":"848a0af4-3fad-4f70-92d1-d96883ded7bc","Type":"ContainerStarted","Data":"512fd0984a8ad4ad0753d67c3add6e80b1d6088c73f44faeb8a976fb6c805652"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.704232 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" event={"ID":"848a0af4-3fad-4f70-92d1-d96883ded7bc","Type":"ContainerStarted","Data":"bc31ca738739eb22847ee2b3972f044c1f430f401be42bfc4e6591def3a9eb42"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.717138 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" event={"ID":"888914ad-2e3b-4013-886a-3e4eaf653ab7","Type":"ContainerStarted","Data":"680cccccf7538adc3fd89a608d9224e743751be4aac171622ffbf71982b3670f"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.723528 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" event={"ID":"8d4c7ce2-7724-494b-b86a-23627074ce45","Type":"ContainerStarted","Data":"e87a106618050a870befaf41f638371ec21990d52ffc797b8e5737eb834eafd5"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.728428 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" podStartSLOduration=126.728411628 podStartE2EDuration="2m6.728411628s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:15.727657681 +0000 UTC m=+154.609543686" watchObservedRunningTime="2025-12-05 19:16:15.728411628 +0000 UTC m=+154.610297623"
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.741038 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w8n5n" event={"ID":"93cab3f2-1f4b-4716-a747-5dbf51b26b1d","Type":"ContainerStarted","Data":"ed41763001a8dd2d0a18eecad44b33624ff7595ba297f1fa8093e795faf0723c"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.741079 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-w8n5n" event={"ID":"93cab3f2-1f4b-4716-a747-5dbf51b26b1d","Type":"ContainerStarted","Data":"e174d76ff4b99e880429544a8e5d0138d039a183e9a0b3b5a6a1f18e158a3321"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.741625 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-w8n5n"
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.755311 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" event={"ID":"09020836-a0b8-4ad6-a270-91335b715695","Type":"ContainerStarted","Data":"8c0cb03e6e430394b1aeb93232a9aa97cb346175e628bf57b03acd2547ae8ca2"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.755351 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" event={"ID":"09020836-a0b8-4ad6-a270-91335b715695","Type":"ContainerStarted","Data":"2e1c0426d0abd3f9848c8227c6d47122dc0e7cadb3d187f50fa2fd16f4c91e2f"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.758739 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" event={"ID":"87744d6f-b352-49ee-8978-c50c5ec247b4","Type":"ContainerStarted","Data":"939465b9d96799201cfd14902ea535aac422262dc9be4965bca4b545a7a2cfc4"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.765543 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" event={"ID":"929478f4-2248-4064-8d89-d4a5a5ba5164","Type":"ContainerStarted","Data":"143be4e93c347dc77d7f2972927bd07be921a2dffe52b395feb0a790b2172c9d"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.768465 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerStarted","Data":"1672a1e212f35d494c2ecde5da920a7ac1b502772de4137897728e787e4cbb46"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.779830 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" event={"ID":"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6","Type":"ContainerStarted","Data":"2394a51dfa8bbce89e359206aa2115a68fec67b778acddb0508431e7f57a067d"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.791876 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" event={"ID":"13eb0fde-8d53-4954-9cb1-5f1641cb0d14","Type":"ContainerStarted","Data":"2f898480705d0377f90576a59de7828c7efdf6e7aae1aabb5f9c631cd0475cfc"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.792187 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.792384 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.29236915 +0000 UTC m=+155.174255145 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.792590 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.794827 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.294817896 +0000 UTC m=+155.176703891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.802439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" event={"ID":"52af244c-8bc5-4cd4-8d87-937dcb4137c1","Type":"ContainerStarted","Data":"c00d6b7608e21edfa8c42ae393a5a4945876b4d1058aebc3bf00780d4c98722d"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.815892 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k66n9" event={"ID":"691aea14-6408-453c-b4c1-99e2760ab531","Type":"ContainerStarted","Data":"1812e682c1ca2b9dbaeabca50bc76a803282695470c33492fe8cf90b2245f879"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.822140 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" event={"ID":"dbc4424a-a893-4cab-a2ed-d29155d30633","Type":"ContainerStarted","Data":"2194c507a0a64aaaf4ecad38c44c04416e8e0988b25b21a3f45711ab0f151210"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.826939 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" event={"ID":"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f","Type":"ContainerStarted","Data":"5a978b87204c3043179e8f75bdb7b5e6cbd92efa1b8f4954106c7ced741249d3"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.837596 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" event={"ID":"3027be5b-ae8f-442c-a924-0dc7434e6b1c","Type":"ContainerStarted","Data":"69988a7c081be9dcdc8261c1448ea9f31dc24d4c03bbb8bb371df2113f41c2b5"}
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.869862 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst"
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.894272 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.895399 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.395377679 +0000 UTC m=+155.277263674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:15 crc kubenswrapper[4982]: I1205 19:16:15.999456 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:15 crc kubenswrapper[4982]: E1205 19:16:15.999735 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.49972549 +0000 UTC m=+155.381611485 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.045270 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.101569 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.101959 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.601943992 +0000 UTC m=+155.483829987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.152540 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-w8n5n"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.179856 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.206046 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.206395 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.706379765 +0000 UTC m=+155.588265770 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.253341 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-2r5rq" podStartSLOduration=127.253298034 podStartE2EDuration="2m7.253298034s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.225158357 +0000 UTC m=+155.107044372" watchObservedRunningTime="2025-12-05 19:16:16.253298034 +0000 UTC m=+155.135184049"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.309834 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.312086 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.812055006 +0000 UTC m=+155.693941001 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.341660 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-p8gsl" podStartSLOduration=127.341642387 podStartE2EDuration="2m7.341642387s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.339527458 +0000 UTC m=+155.221413463" watchObservedRunningTime="2025-12-05 19:16:16.341642387 +0000 UTC m=+155.223528382"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.411434 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.411831 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:16.91181411 +0000 UTC m=+155.793700105 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.500895 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9k6ph" podStartSLOduration=127.500877269 podStartE2EDuration="2m7.500877269s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.394286297 +0000 UTC m=+155.276172302" watchObservedRunningTime="2025-12-05 19:16:16.500877269 +0000 UTC m=+155.382763264"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.508577 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-w8n5n" podStartSLOduration=127.508557486 podStartE2EDuration="2m7.508557486s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.505202649 +0000 UTC m=+155.387088644" watchObservedRunningTime="2025-12-05 19:16:16.508557486 +0000 UTC m=+155.390443481"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.508986 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ngqbv"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.519836 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.520216 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.020199684 +0000 UTC m=+155.902085679 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.570390 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xtdn4"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.588505 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.623010 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.623303 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.123291075 +0000 UTC m=+156.005177060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.636238 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-sp47z"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.636863 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-c4ssd" podStartSLOduration=127.636851687 podStartE2EDuration="2m7.636851687s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.62783347 +0000 UTC m=+155.509719465" watchObservedRunningTime="2025-12-05 19:16:16.636851687 +0000 UTC m=+155.518737682"
Dec 05 19:16:16 crc kubenswrapper[4982]: W1205 19:16:16.643100 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podee8c296e_5e20_47d2_b161_d5c610f7b6a9.slice/crio-26146702d83d8d874c52bd76eb43e538e9b95aecfe174e2cffd6eb8d593b02e3 WatchSource:0}: Error finding container 26146702d83d8d874c52bd76eb43e538e9b95aecfe174e2cffd6eb8d593b02e3: Status 404 returned error can't find the container with id 26146702d83d8d874c52bd76eb43e538e9b95aecfe174e2cffd6eb8d593b02e3
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.701070 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.723518 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.723731 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.223718966 +0000 UTC m=+156.105604951 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.770315 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.776212 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.777684 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv"]
Dec 05 19:16:16 crc kubenswrapper[4982]: W1205 19:16:16.817450 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72f8774d_c2a9_4489_9812_2b72525fe9d9.slice/crio-0c162c7dddeaccaaeb64586b792fb774bf65b773e8990fec168691fdf276291e WatchSource:0}: Error finding container 0c162c7dddeaccaaeb64586b792fb774bf65b773e8990fec168691fdf276291e: Status 404 returned error can't find the container with id 0c162c7dddeaccaaeb64586b792fb774bf65b773e8990fec168691fdf276291e
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.824843 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-5kg8b"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.825927 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.826175 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.326165263 +0000 UTC m=+156.208051258 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.863553 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-59w99"]
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.875516 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" event={"ID":"848a0af4-3fad-4f70-92d1-d96883ded7bc","Type":"ContainerStarted","Data":"5bc002d33041c461e92086ce37a399a8ed21951e10ad5675e4bcc88013463e5c"}
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.878533 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" event={"ID":"9c78cdb7-3299-4b32-b043-77323397f4e7","Type":"ContainerStarted","Data":"e8ea1858fc7c845049cfd79a4016191cc2590bbf17f4484151afdd4bbf5b9c45"}
Dec 05 19:16:16 crc kubenswrapper[4982]: W1205 19:16:16.881898 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25b3bfdc_d13e_4dfb_8a21_59aaa07c3871.slice/crio-40240269f442febdb37be042e2e691badb6901defff71189eee5d4208f34c344 WatchSource:0}: Error finding container 40240269f442febdb37be042e2e691badb6901defff71189eee5d4208f34c344: Status 404 returned error can't find the container with id 40240269f442febdb37be042e2e691badb6901defff71189eee5d4208f34c344
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.906677 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-tfh59" event={"ID":"4487ea8f-6e7c-4963-8b8a-495beef6b634","Type":"ContainerStarted","Data":"567e94baf745a539d226a3c905e852a21f046040b1d902e341b0df631f276301"}
Dec 05 19:16:16 crc kubenswrapper[4982]: W1205 19:16:16.923017 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod416dafe6_da57_4f4c_b550_4ef07e293c90.slice/crio-ed2cce1f368983610e4b9a01e6b6a6a1c01a3f45c9b4ce25482b8c6900e8b715 WatchSource:0}: Error finding container ed2cce1f368983610e4b9a01e6b6a6a1c01a3f45c9b4ce25482b8c6900e8b715: Status 404 returned error can't find the container with id ed2cce1f368983610e4b9a01e6b6a6a1c01a3f45c9b4ce25482b8c6900e8b715
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.924833 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" event={"ID":"b9c8120d-5b91-4dd4-8d6c-dab96d4cb46c","Type":"ContainerStarted","Data":"7a015e376e2b606df3a6a949b23ab8f58b2b810cabbda0ec7be70e86216d6e3e"}
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.926626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:16 crc kubenswrapper[4982]: E1205 19:16:16.926929 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.426914191 +0000 UTC m=+156.308800186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.935811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" event={"ID":"52af244c-8bc5-4cd4-8d87-937dcb4137c1","Type":"ContainerStarted","Data":"ac4f15cd2619aac4119241cb5bb06a249e1145f18cdfd0a201252e9695f06c45"}
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.948599 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-nkzjs" podStartSLOduration=127.94858538 podStartE2EDuration="2m7.94858538s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.914597818 +0000 UTC m=+155.796483813" watchObservedRunningTime="2025-12-05 19:16:16.94858538 +0000 UTC m=+155.830471375"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.948945 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-tfh59" podStartSLOduration=6.948941588 podStartE2EDuration="6.948941588s" podCreationTimestamp="2025-12-05 19:16:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.946926081 +0000 UTC m=+155.828812076" watchObservedRunningTime="2025-12-05 19:16:16.948941588 +0000 UTC m=+155.830827583"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.963914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" event={"ID":"54fb6e13-86a5-45f3-8640-28735d6db34f","Type":"ContainerStarted","Data":"1f6d51748ec58be437c3f73c7039a8876e6ce3f48997ce68da92c36abf22aa8d"}
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.964326 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.965741 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" event={"ID":"ca52d832-2bf9-49ca-a601-fc00d355efa3","Type":"ContainerStarted","Data":"e524e17e4c1b6da8405007282a9b8911c4159a75abbe81b780be5cbcfd71f2f6"}
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.980035 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-9qkfb" podStartSLOduration=127.980020353 podStartE2EDuration="2m7.980020353s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:16.978814755 +0000 UTC m=+155.860700770" watchObservedRunningTime="2025-12-05 19:16:16.980020353 +0000 UTC m=+155.861906348"
Dec 05 19:16:16 crc kubenswrapper[4982]: I1205 19:16:16.990324 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-jpmmc" event={"ID":"5f38bb1c-e651-4dbb-8ca6-f2245e296df9","Type":"ContainerStarted","Data":"873ca36048a5674d8a58d96cd21c68fbd050a1a2605aefaa44000f47fc54e643"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.028156 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.029434 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.529419649 +0000 UTC m=+156.411305644 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.029764 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" podStartSLOduration=127.029750817 podStartE2EDuration="2m7.029750817s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.029195834 +0000 UTC m=+155.911081829" watchObservedRunningTime="2025-12-05 19:16:17.029750817 +0000 UTC m=+155.911636812"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.040423 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" event={"ID":"115ca87a-0d80-431b-a1c9-9a013d387a73","Type":"ContainerStarted","Data":"5d5849b59127ebe8b0d8d1b4370fd054d1a1ebf9b9f9dab2e562496974dd285e"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.104042 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-xkt4w" podStartSLOduration=128.104028286 podStartE2EDuration="2m8.104028286s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.103531354 +0000 UTC m=+155.985417349" watchObservedRunningTime="2025-12-05 19:16:17.104028286 +0000 UTC m=+155.985914281"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.106388 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" event={"ID":"c2bde128-2402-4be6-bbe8-ef3518e58045","Type":"ContainerStarted","Data":"06367966c92903df86409e4f479a9a62382fb13013625ff88736b2e844d40eb1"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.163748 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.165905 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.665885119 +0000 UTC m=+156.547771104 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.177698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" event={"ID":"47f30033-f3ed-45d5-ad06-4ed3ac9a4db6","Type":"ContainerStarted","Data":"51caf5bb3bf12a8a1cec229e2bca00d6460243781cc2da6bb780bbe2f2f2e41c"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.188755 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" podStartSLOduration=128.188734255 podStartE2EDuration="2m8.188734255s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.162223865 +0000 UTC m=+156.044109860" watchObservedRunningTime="2025-12-05 19:16:17.188734255 +0000 UTC m=+156.070620250"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.262839 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" event={"ID":"62fa1e5c-e287-40d9-b98e-2bd536b193ed","Type":"ContainerStarted","Data":"3dd2cb18730611e88206a0f23a2ee31f77dc33af049b73d9c648902ab22ce5ad"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.270076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.270467 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.770453785 +0000 UTC m=+156.652339780 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.288560 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" event={"ID":"2674c1ed-7389-410e-8720-82e3e9086952","Type":"ContainerStarted","Data":"16fb19551b5c806f5c6473f658f35a9a3f919eda0ba78f19e0364a17c3a1329c"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.294594 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-rj5rw" podStartSLOduration=127.29456732 podStartE2EDuration="2m7.29456732s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.261640752 +0000 UTC m=+156.143526737" watchObservedRunningTime="2025-12-05 19:16:17.29456732 +0000 UTC m=+156.176453315"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.295044 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-jpmmc" podStartSLOduration=7.29503703 podStartE2EDuration="7.29503703s" podCreationTimestamp="2025-12-05 19:16:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.218338636 +0000 UTC m=+156.100224631" watchObservedRunningTime="2025-12-05 19:16:17.29503703 +0000 UTC m=+156.176923025"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.341648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" event={"ID":"09720fb9-ddb6-4a28-9187-5edbee74b5bd","Type":"ContainerStarted","Data":"396f906aceedf0415fca6b2f834b49bfc547d232d9bbc0468c2000c2611a1b5c"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.368361 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" event={"ID":"26c02f43-04d0-49fd-a8c0-5e6856cef9f2","Type":"ContainerStarted","Data":"ea6209d3b81e69f65c888d1a6859efd7eb884f17e373f7b43c739564fe300e64"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.370755 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.371790 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.871773936 +0000 UTC m=+156.753659931 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.372393 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" event={"ID":"ee8c296e-5e20-47d2-b161-d5c610f7b6a9","Type":"ContainerStarted","Data":"26146702d83d8d874c52bd76eb43e538e9b95aecfe174e2cffd6eb8d593b02e3"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.383674 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" event={"ID":"cbf7a1ef-6c35-4e9e-9ec3-8b1be9fbbc3d","Type":"ContainerStarted","Data":"e05bc6ec8137da1e67427261fc67903180ea2fe72204cfcd4f852b04018a7e87"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.432378 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" event={"ID":"4f105472-b420-4bb3-877d-663d96eed1af","Type":"ContainerStarted","Data":"3ca10dd8a85b3b1324c89537765a128ba83445d6da2cead6e7c49f6e71eda6cc"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.433261 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.445607 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.448630 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-rk7nv" podStartSLOduration=128.448616064 podStartE2EDuration="2m8.448616064s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.447639291 +0000 UTC m=+156.329525286" watchObservedRunningTime="2025-12-05 19:16:17.448616064 +0000 UTC m=+156.330502059"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.450917 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" podStartSLOduration=77.450906606 podStartE2EDuration="1m17.450906606s" podCreationTimestamp="2025-12-05 19:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.329578775 +0000 UTC m=+156.211464790" watchObservedRunningTime="2025-12-05 19:16:17.450906606 +0000 UTC m=+156.332792601"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.451207 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.456543 4982 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-qsnxp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.456599 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" podUID="4f105472-b420-4bb3-877d-663d96eed1af" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.458058 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k66n9" event={"ID":"691aea14-6408-453c-b4c1-99e2760ab531","Type":"ContainerStarted","Data":"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.466680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xzjmw" event={"ID":"a75c41a7-8e00-45d8-b5a7-5a19d4aa3114","Type":"ContainerStarted","Data":"10abd654f9beba03e205250b1af9d49d02d76adf177504d6d92935de092c8bd5"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.472500 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.472846 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:17.972834581 +0000 UTC m=+156.854720576 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.508280 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-k66n9" podStartSLOduration=128.508260906 podStartE2EDuration="2m8.508260906s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.506813603 +0000 UTC m=+156.388699598" watchObservedRunningTime="2025-12-05 19:16:17.508260906 +0000 UTC m=+156.390146891"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.509375 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" podStartSLOduration=127.509367101 podStartE2EDuration="2m7.509367101s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.485024901 +0000 UTC m=+156.366910916" watchObservedRunningTime="2025-12-05 19:16:17.509367101 +0000 UTC m=+156.391253096"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.516549 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" event={"ID":"dbc4424a-a893-4cab-a2ed-d29155d30633","Type":"ContainerStarted","Data":"a334fafca5c2ad509227b0f8a927d92ae8fe33bd6c591a969491083c606828bf"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.544068 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-xzjmw" podStartSLOduration=128.544045549 podStartE2EDuration="2m8.544045549s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.534669104 +0000 UTC m=+156.416555099" watchObservedRunningTime="2025-12-05 19:16:17.544045549 +0000 UTC m=+156.425931544"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.563443 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" event={"ID":"87744d6f-b352-49ee-8978-c50c5ec247b4","Type":"ContainerStarted","Data":"53b98be2e59e8a960c9d4847387e88d93459899581d31baab5c615e42e1ee1be"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.574077 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-q84b6" podStartSLOduration=127.57406125 podStartE2EDuration="2m7.57406125s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.573314123 +0000 UTC m=+156.455200118" watchObservedRunningTime="2025-12-05 19:16:17.57406125 +0000 UTC m=+156.455947245"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.576256 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.576458 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.076436314 +0000 UTC m=+156.958322309 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.576795 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.580433 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.080420576 +0000 UTC m=+156.962306571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.604262 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-l248k" podStartSLOduration=128.604242654 podStartE2EDuration="2m8.604242654s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.602192937 +0000 UTC m=+156.484078922" watchObservedRunningTime="2025-12-05 19:16:17.604242654 +0000 UTC m=+156.486128649"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.620618 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" event={"ID":"13eb0fde-8d53-4954-9cb1-5f1641cb0d14","Type":"ContainerStarted","Data":"a53fdc439be0aca4b5378053c228ecaf2406355b038e9c8c79a3d3ea0ddfb12d"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.663762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" event={"ID":"8d4c7ce2-7724-494b-b86a-23627074ce45","Type":"ContainerStarted","Data":"732f2de08c2afad4d43271ba94024f2f45522a2a9cd46b8450860d53a2b58d3b"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.666100 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hd46v" podStartSLOduration=128.666081597 podStartE2EDuration="2m8.666081597s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.663895177 +0000 UTC m=+156.545781172" watchObservedRunningTime="2025-12-05 19:16:17.666081597 +0000 UTC m=+156.547967582"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.678179 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.679087 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.179072766 +0000 UTC m=+157.060958761 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.685332 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" event={"ID":"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f","Type":"ContainerStarted","Data":"a9aa2679ef4408f7e3226acb0b0323331c9eaaf46983fbde4fb922bf859f898e"}
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.686077 4982 patch_prober.go:28] interesting pod/downloads-7954f5f757-p8gsl container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.686105 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-p8gsl" podUID="34ea9a7e-c915-4142-8718-df09afb6e362" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.780305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.789340 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.289323052 +0000 UTC m=+157.171209047 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.789627 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.826064 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" podStartSLOduration=127.826047087 podStartE2EDuration="2m7.826047087s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:17.69534847 +0000 UTC m=+156.577234465" watchObservedRunningTime="2025-12-05 19:16:17.826047087 +0000 UTC m=+156.707933082"
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.881365 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.882801 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.382786243 +0000 UTC m=+157.264672238 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:17 crc kubenswrapper[4982]: I1205 19:16:17.988044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:17 crc kubenswrapper[4982]: E1205 19:16:17.988615 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.488603617 +0000 UTC m=+157.370489612 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.090000 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.090095 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.590073162 +0000 UTC m=+157.471959157 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.090353 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.090693 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.590681836 +0000 UTC m=+157.472567831 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.191352 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.191938 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.691903844 +0000 UTC m=+157.573789829 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.222211 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.236563 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 19:16:18 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Dec 05 19:16:18 crc kubenswrapper[4982]: [+]process-running ok Dec 05 19:16:18 crc kubenswrapper[4982]: healthz check failed Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.236609 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.301713 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.302007 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.801996027 +0000 UTC m=+157.683882022 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.403067 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.403241 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.903217036 +0000 UTC m=+157.785103031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.403348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.403659 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:18.903648386 +0000 UTC m=+157.785534381 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.504414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.504818 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.004793293 +0000 UTC m=+157.886679288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.526493 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-rwjj5" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.607000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.607414 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.107398884 +0000 UTC m=+157.989284879 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.691273 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" event={"ID":"09720fb9-ddb6-4a28-9187-5edbee74b5bd","Type":"ContainerStarted","Data":"0477048911dbfa373cea9894e230f9d6834b079c12d1bbad7da1f01658cb9300"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.700203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" event={"ID":"72f8774d-c2a9-4489-9812-2b72525fe9d9","Type":"ContainerStarted","Data":"e651e3e42264bedfdaeda5e8fcbdd1a79ae156e28903ac5c6b3a8f35873879f3"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.700228 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" event={"ID":"72f8774d-c2a9-4489-9812-2b72525fe9d9","Type":"ContainerStarted","Data":"0c162c7dddeaccaaeb64586b792fb774bf65b773e8990fec168691fdf276291e"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.707846 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.708177 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.208161692 +0000 UTC m=+158.090047687 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.713837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" event={"ID":"26c02f43-04d0-49fd-a8c0-5e6856cef9f2","Type":"ContainerStarted","Data":"ac06088b0c07e4a53e8a5871364afac249f252063c037e19d3d95f674d80e277"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.713871 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" event={"ID":"26c02f43-04d0-49fd-a8c0-5e6856cef9f2","Type":"ContainerStarted","Data":"47a50a82a2c18f5e40a360f86468eeab3cfac1d876eaeba74ba0b856ce72afa0"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.713973 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.727380 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-6tbm7" podStartSLOduration=129.727366224 podStartE2EDuration="2m9.727366224s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.724621281 +0000 UTC m=+157.606507276" watchObservedRunningTime="2025-12-05 19:16:18.727366224 +0000 UTC m=+157.609252219" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.740582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5kg8b" event={"ID":"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922","Type":"ContainerStarted","Data":"802c8707177c72eb1f41649b2d126e79726b2d11982823d0c3b2de151b2c3cc1"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.740627 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5kg8b" event={"ID":"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922","Type":"ContainerStarted","Data":"431408a41215ffb4888195b751e63b601d9fde0a03da8df9aba2b1c671e8b75c"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.753110 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-xctm7" podStartSLOduration=128.753095546 podStartE2EDuration="2m8.753095546s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.752643365 +0000 UTC m=+157.634529350" watchObservedRunningTime="2025-12-05 19:16:18.753095546 +0000 UTC m=+157.634981541" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.769845 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" podStartSLOduration=129.769830141 podStartE2EDuration="2m9.769830141s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.769828731 +0000 UTC m=+157.651714726" watchObservedRunningTime="2025-12-05 19:16:18.769830141 +0000 UTC m=+157.651716136" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.774125 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" event={"ID":"9c78cdb7-3299-4b32-b043-77323397f4e7","Type":"ContainerStarted","Data":"b7d0ea2338236ab27ca8f2f1860a2a202669ecf494a3155af5ecdd90c32bb187"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.775059 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.800309 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q" podStartSLOduration=128.800293092 podStartE2EDuration="2m8.800293092s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.799892262 +0000 UTC m=+157.681778257" watchObservedRunningTime="2025-12-05 19:16:18.800293092 +0000 UTC m=+157.682179087" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.802864 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" event={"ID":"2674c1ed-7389-410e-8720-82e3e9086952","Type":"ContainerStarted","Data":"8e7145bfce4e740179c57b1dda03dcf8eab4fa68c622adad54fafe0ffad9c7fb"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.803012 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" event={"ID":"2674c1ed-7389-410e-8720-82e3e9086952","Type":"ContainerStarted","Data":"cec14cbfb08211872d1629313459c40637cec4f0f73c768287b4099067dd638f"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.812819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.814706 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.314694133 +0000 UTC m=+158.196580128 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.827210 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-n5kkt" podStartSLOduration=129.82719312 podStartE2EDuration="2m9.82719312s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.824951779 +0000 UTC m=+157.706837774" watchObservedRunningTime="2025-12-05 19:16:18.82719312 +0000 UTC m=+157.709079115" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.846500 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bh4fj" event={"ID":"8d4c7ce2-7724-494b-b86a-23627074ce45","Type":"ContainerStarted","Data":"53b258bf3ce11e122e0523c9fdb03c0af743a805cbe5c019734a86965fb7abbb"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.853923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" event={"ID":"4f105472-b420-4bb3-877d-663d96eed1af","Type":"ContainerStarted","Data":"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.869438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" event={"ID":"888914ad-2e3b-4013-886a-3e4eaf653ab7","Type":"ContainerStarted","Data":"8142fd45e35a8deff335a6e26a18628f1dea1f3cb038a2cb9e071c0d112b1490"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.878252 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" event={"ID":"b02dee03-310b-4a43-b1ad-de5efd1031a3","Type":"ContainerStarted","Data":"c0f7a1403db93120ae528456a364f499198d6f4049e6094ca86402e3c806c8f8"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.878291 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" event={"ID":"b02dee03-310b-4a43-b1ad-de5efd1031a3","Type":"ContainerStarted","Data":"8cc581ea61aadb16c6c58f5bba5cea4aa4ec82cd47eee664626acf8d58d3f1ce"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.880072 4982 generic.go:334] "Generic (PLEG): container finished" podID="115ca87a-0d80-431b-a1c9-9a013d387a73" containerID="ae0a6e26a06cc2099bd85a6f9a9f469b676ddce307d675ea7857f843a7ee1a29" exitCode=0 Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.880120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" event={"ID":"115ca87a-0d80-431b-a1c9-9a013d387a73","Type":"ContainerDied","Data":"ae0a6e26a06cc2099bd85a6f9a9f469b676ddce307d675ea7857f843a7ee1a29"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.888384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" event={"ID":"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871","Type":"ContainerStarted","Data":"b3b04bf62dfaa337408615142dc9634128b19a5470552e233e852a7b74b2164e"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.888640 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" event={"ID":"25b3bfdc-d13e-4dfb-8a21-59aaa07c3871","Type":"ContainerStarted","Data":"40240269f442febdb37be042e2e691badb6901defff71189eee5d4208f34c344"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.889099 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.916576 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:18 crc kubenswrapper[4982]: E1205 19:16:18.917232 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.417218122 +0000 UTC m=+158.299104117 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.920399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerStarted","Data":"f724160aff553a57e8ffc46d0828689a3ed89dca5d69621ba675eb0c61e33a7d"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.921203 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.930940 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-p9xfl" podStartSLOduration=129.930927247 podStartE2EDuration="2m9.930927247s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.903262411 +0000 UTC m=+157.785148406" watchObservedRunningTime="2025-12-05 19:16:18.930927247 +0000 UTC m=+157.812813242" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.932467 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" podStartSLOduration=129.932461502 podStartE2EDuration="2m9.932461502s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:18.929986256 +0000 UTC m=+157.811872251" watchObservedRunningTime="2025-12-05 19:16:18.932461502 +0000 UTC m=+157.814347497" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.952731 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-jdcq7" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.967489 4982 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-wdf6s container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" start-of-body= Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.967548 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.35:8080/healthz\": dial tcp 10.217.0.35:8080: connect: connection refused" Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.972379 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" event={"ID":"c2c6637f-1ebe-4343-8d20-9aa14df3cc2f","Type":"ContainerStarted","Data":"7a1ba122d08bd3f3e7c95e325c01fed03de620df2d0cca0697b7b25ea835162a"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.992900 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" event={"ID":"3027be5b-ae8f-442c-a924-0dc7434e6b1c","Type":"ContainerStarted","Data":"5776ec9a77694c3fbc11c72eff34e1e5ce82534aab98e94e25b474b5c6704cd7"} Dec 05 19:16:18 crc kubenswrapper[4982]: I1205 19:16:18.993965 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.003336 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-59w99" podStartSLOduration=130.003318543 podStartE2EDuration="2m10.003318543s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.001265135 +0000 UTC m=+157.883151120" watchObservedRunningTime="2025-12-05 19:16:19.003318543 +0000 UTC m=+157.885204538" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.020787 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.021215 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.023517 4982 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.523503957 +0000 UTC m=+158.405389952 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.028224 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" event={"ID":"62fa1e5c-e287-40d9-b98e-2bd536b193ed","Type":"ContainerStarted","Data":"a54d91ebe2e522e532276654d21e4f423640638076685e831b5d750ef91273fb"} Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.029425 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.036333 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m7scf" podStartSLOduration=130.036316162 podStartE2EDuration="2m10.036316162s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.027570571 +0000 UTC m=+157.909456586" watchObservedRunningTime="2025-12-05 19:16:19.036316162 +0000 UTC m=+157.918202157" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.059805 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" event={"ID":"416dafe6-da57-4f4c-b550-4ef07e293c90","Type":"ContainerStarted","Data":"dc44c626f13bd4cfb4d3d2752f79730b9d6bf09bcbb00d4c836df34cc49b733a"} Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.060115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" event={"ID":"416dafe6-da57-4f4c-b550-4ef07e293c90","Type":"ContainerStarted","Data":"e6b569ff11fd29c88e23c366f3e559d7ece9d4139c15a378452fe27db2ed41b1"} Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.060201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" event={"ID":"416dafe6-da57-4f4c-b550-4ef07e293c90","Type":"ContainerStarted","Data":"ed2cce1f368983610e4b9a01e6b6a6a1c01a3f45c9b4ce25482b8c6900e8b715"} Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.101193 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4xlkq" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.123045 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 
19:16:19.125186 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.625089914 +0000 UTC m=+158.506975909 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.129216 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.132637 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.632619947 +0000 UTC m=+158.514505942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.203407 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" podStartSLOduration=129.203394906 podStartE2EDuration="2m9.203394906s" podCreationTimestamp="2025-12-05 19:14:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.201733078 +0000 UTC m=+158.083619073" watchObservedRunningTime="2025-12-05 19:16:19.203394906 +0000 UTC m=+158.085280901" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.204293 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-9vzmg" podStartSLOduration=130.204288066 podStartE2EDuration="2m10.204288066s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.133103159 +0000 UTC m=+158.014989154" watchObservedRunningTime="2025-12-05 19:16:19.204288066 +0000 UTC m=+158.086174061" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.230737 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.231476 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.731459791 +0000 UTC m=+158.613345786 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.246690 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 19:16:19 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Dec 05 19:16:19 crc kubenswrapper[4982]: [+]process-running ok Dec 05 19:16:19 crc kubenswrapper[4982]: healthz check failed Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.247003 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.334218 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.334563 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.834552683 +0000 UTC m=+158.716438678 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.354981 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" podStartSLOduration=130.354964193 podStartE2EDuration="2m10.354964193s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.352322042 +0000 UTC m=+158.234208037" watchObservedRunningTime="2025-12-05 19:16:19.354964193 +0000 UTC m=+158.236850188" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.376978 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vb9rv" podStartSLOduration=130.376958739 podStartE2EDuration="2m10.376958739s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:19.375939536 +0000 UTC m=+158.257825541" watchObservedRunningTime="2025-12-05 19:16:19.376958739 +0000 UTC m=+158.258844734" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.438557 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.438661 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.938637508 +0000 UTC m=+158.820523503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.438841 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.439223 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:19.939206721 +0000 UTC m=+158.821092716 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.520095 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"] Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.521054 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.524453 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.531122 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"] Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.539647 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.540765 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.040749727 +0000 UTC m=+158.922635722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.641026 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.641093 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k8px\" (UniqueName: \"kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.641130 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.641190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.641459 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.141446444 +0000 UTC m=+159.023332429 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.703066 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-tj24q"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.714846 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bhlmf"]
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.715802 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.720185 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.727370 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhlmf"]
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.742440 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.742665 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k8px\" (UniqueName: \"kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.742715 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.742768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.743463 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.743533 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.243519262 +0000 UTC m=+159.125405257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.743685 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.797012 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k8px\" (UniqueName: \"kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px\") pod \"certified-operators-6k2jq\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.838180 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6k2jq"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.844001 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.844039 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcn4x\" (UniqueName: \"kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.844062 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.844095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.844431 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.344420864 +0000 UTC m=+159.226306859 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.922101 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"]
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.923019 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.947543 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.947805 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcn4x\" (UniqueName: \"kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.947856 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.947910 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.948333 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: E1205 19:16:19.948413 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.448394565 +0000 UTC m=+159.330280550 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.948866 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.971443 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"]
Dec 05 19:16:19 crc kubenswrapper[4982]: I1205 19:16:19.986193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcn4x\" (UniqueName: \"kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x\") pod \"community-operators-bhlmf\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.035464 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhlmf"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.048610 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.048647 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.048714 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.048735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5snm\" (UniqueName: \"kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.049003 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.548972619 +0000 UTC m=+159.430858614 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.108067 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"]
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.112667 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xtdn4" event={"ID":"62fa1e5c-e287-40d9-b98e-2bd536b193ed","Type":"ContainerStarted","Data":"b63beea2969e3e13f2433b8fb1ad97d9d38eb4ff21b1fdf1a493861b260bb09c"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.114388 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-5kg8b" event={"ID":"7bc5e533-52fa-4c3e-9d3f-b0cc3b74d922","Type":"ContainerStarted","Data":"b3dca25e66f844f720704a7907739b36bbb064b83cf04c27368f1ba0cc9eaa6e"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.115044 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-5kg8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.116085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" event={"ID":"ee8c296e-5e20-47d2-b161-d5c610f7b6a9","Type":"ContainerStarted","Data":"302e2c2251cb1349534c9865563bf4ade44c000bc8282a5492da8f3181275887"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.116102 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" event={"ID":"ee8c296e-5e20-47d2-b161-d5c610f7b6a9","Type":"ContainerStarted","Data":"f0784e151d84909c1b1e9c5bbf40a5db71cc20347ffbfca34602d97797c215bf"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.121944 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" event={"ID":"115ca87a-0d80-431b-a1c9-9a013d387a73","Type":"ContainerStarted","Data":"3aa5d31b37210b54a1a0ac64a030fb9b0ef1410bd2bb9421fde1c9dc21f7f48d"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.121980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" event={"ID":"115ca87a-0d80-431b-a1c9-9a013d387a73","Type":"ContainerStarted","Data":"756d7533c60370e64137c77e28b9716db30a06ab2a6ba431bdc72c83e80af1fc"}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.156775 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dzwxm"]
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.161330 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.161680 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5snm\" (UniqueName: \"kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.161744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.161807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.162730 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.662711416 +0000 UTC m=+159.544597411 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.172855 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.173112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.175615 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.175729 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: W1205 19:16:20.191958 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4718733b_932d_413f_9b5c_3c8a773df710.slice/crio-2ad79d55999a84f44a604eba892e299069493d430fc78ad42ce19da529b25ce2 WatchSource:0}: Error finding container 2ad79d55999a84f44a604eba892e299069493d430fc78ad42ce19da529b25ce2: Status 404 returned error can't find the container with id 2ad79d55999a84f44a604eba892e299069493d430fc78ad42ce19da529b25ce2
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.195267 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-5kg8b" podStartSLOduration=9.195246444 podStartE2EDuration="9.195246444s" podCreationTimestamp="2025-12-05 19:16:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:20.192554062 +0000 UTC m=+159.074440057" watchObservedRunningTime="2025-12-05 19:16:20.195246444 +0000 UTC m=+159.077132439"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.224711 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dzwxm"]
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.226439 4982 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.229094 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5snm\" (UniqueName: \"kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm\") pod \"certified-operators-jnr8b\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.258577 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 19:16:20 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld
Dec 05 19:16:20 crc kubenswrapper[4982]: [+]process-running ok
Dec 05 19:16:20 crc kubenswrapper[4982]: healthz check failed
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.258628 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.262441 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnr8b"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.270309 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.270528 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2s6bf\" (UniqueName: \"kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.270933 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.270963 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.281911 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.781897368 +0000 UTC m=+159.663783353 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.339302 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" podStartSLOduration=131.339281008 podStartE2EDuration="2m11.339281008s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:20.278653623 +0000 UTC m=+159.160539618" watchObservedRunningTime="2025-12-05 19:16:20.339281008 +0000 UTC m=+159.221167003"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.371714 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.372026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.372078 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.372126 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2s6bf\" (UniqueName: \"kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.372635 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.872616075 +0000 UTC m=+159.754502080 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.373029 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.373282 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.393754 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2s6bf\" (UniqueName: \"kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf\") pod \"community-operators-dzwxm\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.473486 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.474123 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:20.97410748 +0000 UTC m=+159.855993475 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.505731 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhlmf"]
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.560547 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzwxm"
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.577674 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.577715 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"]
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.577900 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.077884547 +0000 UTC m=+159.959770542 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: W1205 19:16:20.582939 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc63ba14d_0cb7_4a7f_9137_2c0f88ef6726.slice/crio-e61b2eaf43957ba80224652f4ac179febf4be103255b5393d2898b08e1d39eb3 WatchSource:0}: Error finding container e61b2eaf43957ba80224652f4ac179febf4be103255b5393d2898b08e1d39eb3: Status 404 returned error can't find the container with id e61b2eaf43957ba80224652f4ac179febf4be103255b5393d2898b08e1d39eb3
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.678698 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.679094 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.179078166 +0000 UTC m=+160.060964161 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.743970 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dzwxm"]
Dec 05 19:16:20 crc kubenswrapper[4982]: W1205 19:16:20.751482 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaab55c2a_f07b_4f88_b89b_fe417ff42c27.slice/crio-ba7d6d063e4f8b98837a681e8385f4b6d62ce52ed7a05c47acef753bd440f7fc WatchSource:0}: Error finding container ba7d6d063e4f8b98837a681e8385f4b6d62ce52ed7a05c47acef753bd440f7fc: Status 404 returned error can't find the container with id ba7d6d063e4f8b98837a681e8385f4b6d62ce52ed7a05c47acef753bd440f7fc
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.781915 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.782099 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.282075865 +0000 UTC m=+160.163961860 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.782277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.782575 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.282563947 +0000 UTC m=+160.164449942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.883080 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.883279 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.383258163 +0000 UTC m=+160.265144168 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.883424 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:20 crc kubenswrapper[4982]: E1205 19:16:20.883814 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 19:16:21.383803576 +0000 UTC m=+160.265689571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-z7tw9" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.909771 4982 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T19:16:20.226467782Z","Handler":null,"Name":""}
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.916644 4982 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.916682 4982 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 05 19:16:20 crc kubenswrapper[4982]: I1205 19:16:20.983913 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.003708 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.085604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.088310 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.088359 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.117404 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-z7tw9\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.129835 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2bde128-2402-4be6-bbe8-ef3518e58045" containerID="06367966c92903df86409e4f479a9a62382fb13013625ff88736b2e844d40eb1" exitCode=0
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.129913 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" event={"ID":"c2bde128-2402-4be6-bbe8-ef3518e58045","Type":"ContainerDied","Data":"06367966c92903df86409e4f479a9a62382fb13013625ff88736b2e844d40eb1"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.136652 4982 generic.go:334] "Generic (PLEG): container finished" podID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerID="40f5f940257980b49dfab7974efde9c6a0697ba5331cf7b260c651eddb331e64" exitCode=0
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.136701 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerDied","Data":"40f5f940257980b49dfab7974efde9c6a0697ba5331cf7b260c651eddb331e64"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.136744 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerStarted","Data":"e61b2eaf43957ba80224652f4ac179febf4be103255b5393d2898b08e1d39eb3"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.139112 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.140003 4982 generic.go:334] "Generic (PLEG): container finished" podID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerID="6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851" exitCode=0
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.140064 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerDied","Data":"6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.140094 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerStarted","Data":"ba7d6d063e4f8b98837a681e8385f4b6d62ce52ed7a05c47acef753bd440f7fc"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.147847 4982 generic.go:334] "Generic (PLEG): container finished" podID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerID="2dd76e8298426f7edd33acb6616de061bacc6064cc2131e9f9964351ced3df45" exitCode=0
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.147980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerDied","Data":"2dd76e8298426f7edd33acb6616de061bacc6064cc2131e9f9964351ced3df45"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.148053 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerStarted","Data":"3ffa17404703287cb362623215e00f6c9b576f3ddb9f012abfb89052411911a4"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.156626 4982 generic.go:334] "Generic (PLEG): container finished" podID="4718733b-932d-413f-9b5c-3c8a773df710" containerID="2d30f8cf0b43c7492c5742fd050920e5fb93b28ddb01275368d419404abe2c4d" exitCode=0
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.157173 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerDied","Data":"2d30f8cf0b43c7492c5742fd050920e5fb93b28ddb01275368d419404abe2c4d"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.157250 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerStarted","Data":"2ad79d55999a84f44a604eba892e299069493d430fc78ad42ce19da529b25ce2"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.167028 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" event={"ID":"ee8c296e-5e20-47d2-b161-d5c610f7b6a9","Type":"ContainerStarted","Data":"ad0bbbcc630a5e3d3f2ecf0ae5dba94336aee1d3c10f97babe5403d4f6d95095"}
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.222466 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 19:16:21 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld
Dec 05 19:16:21 crc kubenswrapper[4982]: [+]process-running ok
Dec 05 19:16:21 crc kubenswrapper[4982]: healthz check failed
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.222525 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.320664 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.409901 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.557033 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"]
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.710849 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"]
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.711814 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.713927 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.725509 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"]
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.727771 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.728945 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.730791 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.739547 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.749161 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.795824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.795882 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5527\" (UniqueName: \"kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.795921 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.795972 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.796008 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.896826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5527\" (UniqueName: \"kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.896871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.896911 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.896951 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.896981 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.897054 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.897701 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.901423 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.932659 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:21 crc kubenswrapper[4982]: I1205 19:16:21.953525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5527\" (UniqueName: \"kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527\") pod \"redhat-marketplace-9cd5r\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.023470 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9cd5r"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.049438 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.118531 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"]
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.119819 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.140317 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"]
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.197492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" event={"ID":"ee8c296e-5e20-47d2-b161-d5c610f7b6a9","Type":"ContainerStarted","Data":"8a434bd2bc10988b3a34e4843bf0aadef8b2367dd69309f7b014606d3ed15ece"}
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.200855 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" event={"ID":"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee","Type":"ContainerStarted","Data":"66fb24faa1618672ae2e998bc75da2e53cddffe1ffd4996da8cb70b1ed6aa5cc"}
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.200882 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.200893 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" event={"ID":"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee","Type":"ContainerStarted","Data":"c823051d56f0588ade7bb2535f4f83e8ca4b5c65ee56b7488155709d1c28087c"}
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.202784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2rpz\" (UniqueName: \"kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.202833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.202856 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.224489 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 19:16:22 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld
Dec 05 19:16:22 crc kubenswrapper[4982]: [+]process-running ok
Dec 05 19:16:22 crc kubenswrapper[4982]: healthz check failed
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.224573 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.257554 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ngqbv" podStartSLOduration=12.257533991 podStartE2EDuration="12.257533991s" podCreationTimestamp="2025-12-05 19:16:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:22.224726866 +0000 UTC m=+161.106612861" watchObservedRunningTime="2025-12-05 19:16:22.257533991 +0000 UTC m=+161.139419986"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.259107 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" podStartSLOduration=133.259093877 podStartE2EDuration="2m13.259093877s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:16:22.252778272 +0000 UTC m=+161.134664267" watchObservedRunningTime="2025-12-05 19:16:22.259093877 +0000 UTC m=+161.140979872"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.304658 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2rpz\" (UniqueName: \"kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.304828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.304862 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.307225 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.309617 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.324795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2rpz\" (UniqueName: \"kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz\") pod \"redhat-marketplace-lhxlk\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.455768 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lhxlk"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.605621 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.624351 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 19:16:22 crc kubenswrapper[4982]: E1205 19:16:22.624671 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2bde128-2402-4be6-bbe8-ef3518e58045" containerName="collect-profiles"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.624689 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2bde128-2402-4be6-bbe8-ef3518e58045" containerName="collect-profiles"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.624954 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2bde128-2402-4be6-bbe8-ef3518e58045" containerName="collect-profiles"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.625409 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.629525 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.629556 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.641475 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.682108 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"]
Dec 05 19:16:22 crc kubenswrapper[4982]: W1205 19:16:22.709452 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc2800739_c6ca_495c_a39b_b619242e6867.slice/crio-9728b8582b459467de6c7e44f1b0c721afecdeb9fdbbe9bb3bcd25c67003a24f WatchSource:0}: Error finding container 9728b8582b459467de6c7e44f1b0c721afecdeb9fdbbe9bb3bcd25c67003a24f: Status 404 returned error can't find the container with id 9728b8582b459467de6c7e44f1b0c721afecdeb9fdbbe9bb3bcd25c67003a24f
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.711608 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfgsf\" (UniqueName: \"kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf\") pod \"c2bde128-2402-4be6-bbe8-ef3518e58045\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") "
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.711758 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume\") pod \"c2bde128-2402-4be6-bbe8-ef3518e58045\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") "
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.711820 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume\") pod \"c2bde128-2402-4be6-bbe8-ef3518e58045\" (UID: \"c2bde128-2402-4be6-bbe8-ef3518e58045\") "
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.712031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.712140 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.716994 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume" (OuterVolumeSpecName: "config-volume") pod "c2bde128-2402-4be6-bbe8-ef3518e58045" (UID:
"c2bde128-2402-4be6-bbe8-ef3518e58045"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.719337 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf" (OuterVolumeSpecName: "kube-api-access-lfgsf") pod "c2bde128-2402-4be6-bbe8-ef3518e58045" (UID: "c2bde128-2402-4be6-bbe8-ef3518e58045"). InnerVolumeSpecName "kube-api-access-lfgsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.720521 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c2bde128-2402-4be6-bbe8-ef3518e58045" (UID: "c2bde128-2402-4be6-bbe8-ef3518e58045"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.728754 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.732616 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.732662 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.737040 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.738370 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 19:16:22 crc kubenswrapper[4982]: W1205 19:16:22.800089 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod9e7378a9_6783_41a0_94f1_8264296f0d54.slice/crio-7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931 WatchSource:0}: Error finding container 7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931: Status 404 returned error can't find the container with id 7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931 Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813690 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813733 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ctnw\" (UniqueName: \"kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813769 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: 
\"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813843 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813872 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813909 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c2bde128-2402-4be6-bbe8-ef3518e58045-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813922 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfgsf\" (UniqueName: \"kubernetes.io/projected/c2bde128-2402-4be6-bbe8-ef3518e58045-kube-api-access-lfgsf\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.813933 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c2bde128-2402-4be6-bbe8-ef3518e58045-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.829096 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.915208 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.915268 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ctnw\" (UniqueName: \"kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.915316 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content\") pod \"redhat-operators-vpkcb\" (UID: 
\"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.915789 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.915970 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.940853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ctnw\" (UniqueName: \"kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw\") pod \"redhat-operators-vpkcb\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:22 crc kubenswrapper[4982]: I1205 19:16:22.962737 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.045701 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"] Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.048857 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.122392 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.127938 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.139516 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.222600 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.222694 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.222764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-shbzf\" (UniqueName: \"kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.248898 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerStarted","Data":"26949e11024444cd11ff33bd1233f5c321228f66a15e5dd08efdc893ebd54a8b"} Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.251252 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e7378a9-6783-41a0-94f1-8264296f0d54","Type":"ContainerStarted","Data":"7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931"} Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.251378 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 19:16:23 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Dec 05 19:16:23 crc kubenswrapper[4982]: [+]process-running ok Dec 05 19:16:23 crc kubenswrapper[4982]: healthz check failed Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.251422 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.252884 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerStarted","Data":"2ffcb5185676eb08927773f633416636b72c238ffc8e8ad8d566f3a48ac6ebc4"} Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.252904 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" 
event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerStarted","Data":"9728b8582b459467de6c7e44f1b0c721afecdeb9fdbbe9bb3bcd25c67003a24f"} Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.269485 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.269549 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw" event={"ID":"c2bde128-2402-4be6-bbe8-ef3518e58045","Type":"ContainerDied","Data":"0dd88c84ab40ba0cc221bb460a99a6a5e1f2c2cf63aeda5dee61d7f4a8f0d08b"} Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.269578 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dd88c84ab40ba0cc221bb460a99a6a5e1f2c2cf63aeda5dee61d7f4a8f0d08b" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.324946 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.325056 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.325088 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-shbzf\" (UniqueName: \"kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.325757 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.326224 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.351056 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-shbzf\" (UniqueName: \"kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf\") pod \"redhat-operators-hbvk8\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.366568 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-p8gsl" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.442666 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.442701 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.446789 4982 patch_prober.go:28] interesting pod/console-f9d7485db-k66n9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.446851 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-k66n9" podUID="691aea14-6408-453c-b4c1-99e2760ab531" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.458600 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.571397 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 19:16:23 crc kubenswrapper[4982]: W1205 19:16:23.649722 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod7f6ef1c7_b1af_4f06_9ce3_9e001500d08f.slice/crio-3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9 WatchSource:0}: Error finding container 3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9: Status 404 returned error can't find the container with id 3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9 Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.677170 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:16:23 crc kubenswrapper[4982]: W1205 19:16:23.696776 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod10ea7812_4e92_4291_9929_636eccbae790.slice/crio-0659bdd40a6dd08c938c33212b20208055c83cd8757457c4f78a12d930a390e6 WatchSource:0}: Error finding container 0659bdd40a6dd08c938c33212b20208055c83cd8757457c4f78a12d930a390e6: Status 404 returned error can't find the container with id 0659bdd40a6dd08c938c33212b20208055c83cd8757457c4f78a12d930a390e6 Dec 05 19:16:23 crc kubenswrapper[4982]: I1205 19:16:23.828865 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:16:23 crc kubenswrapper[4982]: W1205 19:16:23.836736 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod33cc7f40_54f9_4057_9291_7d66e4f6ab18.slice/crio-0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19 WatchSource:0}: Error finding container 0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19: Status 404 returned error can't find the container with id 0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.219657 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.225404 4982 patch_prober.go:28] interesting 
pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 19:16:24 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Dec 05 19:16:24 crc kubenswrapper[4982]: [+]process-running ok Dec 05 19:16:24 crc kubenswrapper[4982]: healthz check failed Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.225486 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.242987 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.243272 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.250926 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.278642 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2800739-c6ca-495c-a39b-b619242e6867" containerID="2ffcb5185676eb08927773f633416636b72c238ffc8e8ad8d566f3a48ac6ebc4" exitCode=0 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.279107 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerDied","Data":"2ffcb5185676eb08927773f633416636b72c238ffc8e8ad8d566f3a48ac6ebc4"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.281043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerDied","Data":"7b593ba526b4ed2ad40357c3120f7dc085fc071dafe1a825030f9855d8aeb270"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.281083 4982 generic.go:334] "Generic (PLEG): container finished" podID="10ea7812-4e92-4291-9929-636eccbae790" containerID="7b593ba526b4ed2ad40357c3120f7dc085fc071dafe1a825030f9855d8aeb270" exitCode=0 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.281158 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerStarted","Data":"0659bdd40a6dd08c938c33212b20208055c83cd8757457c4f78a12d930a390e6"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.284984 4982 generic.go:334] "Generic (PLEG): container finished" podID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerID="f82e160ec5a40fde0b658f19d15696323a6963bf4c4428bbd80cf60884b41d89" exitCode=0 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.285087 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerDied","Data":"f82e160ec5a40fde0b658f19d15696323a6963bf4c4428bbd80cf60884b41d89"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.285124 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" 
event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerStarted","Data":"0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.290425 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9954db1-336a-4478-869c-080166403adb" containerID="a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce" exitCode=0 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.290505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerDied","Data":"a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.298125 4982 generic.go:334] "Generic (PLEG): container finished" podID="9e7378a9-6783-41a0-94f1-8264296f0d54" containerID="2cb2d6dbb66fcfbb55c9dd5c64d4a00f492d022f23562bd5d14b6503049526ff" exitCode=0 Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.298239 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e7378a9-6783-41a0-94f1-8264296f0d54","Type":"ContainerDied","Data":"2cb2d6dbb66fcfbb55c9dd5c64d4a00f492d022f23562bd5d14b6503049526ff"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.302665 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f","Type":"ContainerStarted","Data":"c7cc7f594566a5810721042f99340ad5b7b1a6ddccf45705272de993aa18d840"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.302725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f","Type":"ContainerStarted","Data":"3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9"} Dec 05 19:16:24 crc kubenswrapper[4982]: I1205 19:16:24.307822 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-sp47z" Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.221755 4982 patch_prober.go:28] interesting pod/router-default-5444994796-xzjmw container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 19:16:25 crc kubenswrapper[4982]: [-]has-synced failed: reason withheld Dec 05 19:16:25 crc kubenswrapper[4982]: [+]process-running ok Dec 05 19:16:25 crc kubenswrapper[4982]: healthz check failed Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.222143 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xzjmw" podUID="a75c41a7-8e00-45d8-b5a7-5a19d4aa3114" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.310221 4982 generic.go:334] "Generic (PLEG): container finished" podID="7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" containerID="c7cc7f594566a5810721042f99340ad5b7b1a6ddccf45705272de993aa18d840" exitCode=0 Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.310259 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f","Type":"ContainerDied","Data":"c7cc7f594566a5810721042f99340ad5b7b1a6ddccf45705272de993aa18d840"} 
Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.859594 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.985325 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access\") pod \"9e7378a9-6783-41a0-94f1-8264296f0d54\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.985470 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir\") pod \"9e7378a9-6783-41a0-94f1-8264296f0d54\" (UID: \"9e7378a9-6783-41a0-94f1-8264296f0d54\") " Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.985883 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9e7378a9-6783-41a0-94f1-8264296f0d54" (UID: "9e7378a9-6783-41a0-94f1-8264296f0d54"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:16:25 crc kubenswrapper[4982]: I1205 19:16:25.994901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9e7378a9-6783-41a0-94f1-8264296f0d54" (UID: "9e7378a9-6783-41a0-94f1-8264296f0d54"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.086611 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9e7378a9-6783-41a0-94f1-8264296f0d54-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.086650 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9e7378a9-6783-41a0-94f1-8264296f0d54-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.230121 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.240498 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-xzjmw" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.319081 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.320335 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9e7378a9-6783-41a0-94f1-8264296f0d54","Type":"ContainerDied","Data":"7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931"} Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.320382 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7a216e134f121aebe34088219a86e16011d0a2a01c2b9de285eab349a514b931" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.599847 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.696197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir\") pod \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.696314 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" (UID: "7f6ef1c7-b1af-4f06-9ce3-9e001500d08f"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.696330 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access\") pod \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\" (UID: \"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f\") " Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.696694 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.710542 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" (UID: "7f6ef1c7-b1af-4f06-9ce3-9e001500d08f"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:16:26 crc kubenswrapper[4982]: I1205 19:16:26.798657 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7f6ef1c7-b1af-4f06-9ce3-9e001500d08f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:16:27 crc kubenswrapper[4982]: I1205 19:16:27.329978 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"7f6ef1c7-b1af-4f06-9ce3-9e001500d08f","Type":"ContainerDied","Data":"3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9"} Dec 05 19:16:27 crc kubenswrapper[4982]: I1205 19:16:27.330016 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3cb25ef6bdebf96a68e6145475f9e267b309d35efe7d406011419422a2132eb9" Dec 05 19:16:27 crc kubenswrapper[4982]: I1205 19:16:27.330025 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 19:16:29 crc kubenswrapper[4982]: I1205 19:16:29.374420 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-5kg8b" Dec 05 19:16:32 crc kubenswrapper[4982]: I1205 19:16:32.084074 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:16:32 crc kubenswrapper[4982]: I1205 19:16:32.474698 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4d68836a-462d-4364-bc12-b530a7cb0727-metrics-certs\") pod \"network-metrics-daemon-6r5ns\" (UID: \"4d68836a-462d-4364-bc12-b530a7cb0727\") " pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:16:32 crc kubenswrapper[4982]: I1205 19:16:32.710103 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6r5ns" Dec 05 19:16:33 crc kubenswrapper[4982]: I1205 19:16:33.455251 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:33 crc kubenswrapper[4982]: I1205 19:16:33.459265 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:16:41 crc kubenswrapper[4982]: I1205 19:16:41.330665 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:16:42 crc kubenswrapper[4982]: I1205 19:16:42.557492 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:16:42 crc kubenswrapper[4982]: I1205 19:16:42.557832 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:16:52 crc kubenswrapper[4982]: I1205 19:16:52.643323 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 19:16:54 crc kubenswrapper[4982]: I1205 19:16:54.273413 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rj56h" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.482491 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 19:16:58 crc kubenswrapper[4982]: E1205 19:16:58.483797 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e7378a9-6783-41a0-94f1-8264296f0d54" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.483817 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e7378a9-6783-41a0-94f1-8264296f0d54" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: E1205 19:16:58.483831 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.483839 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.483934 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f6ef1c7-b1af-4f06-9ce3-9e001500d08f" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.483945 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e7378a9-6783-41a0-94f1-8264296f0d54" containerName="pruner" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.484385 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.490710 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.491083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.496674 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.657249 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.657614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.759267 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.759517 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.759712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.789196 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:16:58 crc kubenswrapper[4982]: I1205 19:16:58.817197 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.824843 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.825184 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b5snm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-jnr8b_openshift-marketplace(c63ba14d-0cb7-4a7f-9137-2c0f88ef6726): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.826422 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-jnr8b" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.871223 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.871405 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8k8px,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-6k2jq_openshift-marketplace(4718733b-932d-413f-9b5c-3c8a773df710): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.872600 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-6k2jq" podUID="4718733b-932d-413f-9b5c-3c8a773df710" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.884586 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage4055634713/3\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.884697 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-shbzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-hbvk8_openshift-marketplace(33cc7f40-54f9-4057-9291-7d66e4f6ab18): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage4055634713/3\": happened during read: context canceled" logger="UnhandledError" Dec 05 19:17:00 crc kubenswrapper[4982]: E1205 19:17:00.885922 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage4055634713/3\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-operators-hbvk8" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.907384 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-hbvk8" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.908580 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-jnr8b" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.908732 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-6k2jq" podUID="4718733b-932d-413f-9b5c-3c8a773df710" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.969966 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.970130 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z2rpz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-lhxlk_openshift-marketplace(b9954db1-336a-4478-869c-080166403adb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:01 crc kubenswrapper[4982]: E1205 19:17:01.971392 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-lhxlk" podUID="b9954db1-336a-4478-869c-080166403adb" Dec 05 19:17:03 crc kubenswrapper[4982]: I1205 19:17:03.877956 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 19:17:03 crc kubenswrapper[4982]: I1205 19:17:03.880347 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:03 crc kubenswrapper[4982]: I1205 19:17:03.887469 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.026798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.027192 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.027258 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.128849 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.128997 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.129020 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.129255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.129335 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir\") pod \"installer-9-crc\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.152140 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: I1205 19:17:04.203076 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.758177 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-lhxlk" podUID="b9954db1-336a-4478-869c-080166403adb" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.835017 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.835223 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2ctnw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-vpkcb_openshift-marketplace(10ea7812-4e92-4291-9929-636eccbae790): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.837273 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-vpkcb" podUID="10ea7812-4e92-4291-9929-636eccbae790" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.877372 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.877545 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c5527,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-9cd5r_openshift-marketplace(c2800739-c6ca-495c-a39b-b619242e6867): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:04 crc kubenswrapper[4982]: E1205 19:17:04.879987 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-9cd5r" podUID="c2800739-c6ca-495c-a39b-b619242e6867" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.127932 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-vpkcb" podUID="10ea7812-4e92-4291-9929-636eccbae790" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.127944 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-9cd5r" podUID="c2800739-c6ca-495c-a39b-b619242e6867" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.208068 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.208372 4982 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wcn4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-bhlmf_openshift-marketplace(335c7642-44cd-41bf-99ac-9c9fcbbe74be): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.209724 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-bhlmf" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.215247 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.215370 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2s6bf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dzwxm_openshift-marketplace(aab55c2a-f07b-4f88-b89b-fe417ff42c27): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.217342 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dzwxm" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" Dec 05 19:17:06 crc kubenswrapper[4982]: I1205 19:17:06.540834 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6r5ns"] Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.548237 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dzwxm" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" Dec 05 19:17:06 crc kubenswrapper[4982]: E1205 19:17:06.548311 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-bhlmf" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" Dec 05 19:17:06 crc kubenswrapper[4982]: W1205 19:17:06.551846 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d68836a_462d_4364_bc12_b530a7cb0727.slice/crio-890c5b660ec27d52e3631606bb7266604b85458b91af39bedb1b24fce9bc9cbf WatchSource:0}: Error finding container 890c5b660ec27d52e3631606bb7266604b85458b91af39bedb1b24fce9bc9cbf: Status 404 returned error can't find the container with id 890c5b660ec27d52e3631606bb7266604b85458b91af39bedb1b24fce9bc9cbf Dec 05 19:17:06 crc kubenswrapper[4982]: W1205 19:17:06.631225 4982 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podd7a3ed08_9759_41ad_8af6_0037be5c7f44.slice/crio-d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be WatchSource:0}: Error finding container d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be: Status 404 returned error can't find the container with id d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be Dec 05 19:17:06 crc kubenswrapper[4982]: W1205 19:17:06.634546 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podbc5284f4_8c16_4b83_abb1_3ff841844fbb.slice/crio-c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be WatchSource:0}: Error finding container c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be: Status 404 returned error can't find the container with id c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be Dec 05 19:17:06 crc kubenswrapper[4982]: I1205 19:17:06.638337 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 19:17:06 crc kubenswrapper[4982]: I1205 19:17:06.643121 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.549308 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc5284f4-8c16-4b83-abb1-3ff841844fbb","Type":"ContainerStarted","Data":"67e8898c26806e7c46e2e981722a885a7a70ea253b940b6a508c830f44b079e5"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.550995 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc5284f4-8c16-4b83-abb1-3ff841844fbb","Type":"ContainerStarted","Data":"c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.551088 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" event={"ID":"4d68836a-462d-4364-bc12-b530a7cb0727","Type":"ContainerStarted","Data":"11e5eab8f746a39de989f88db524c84da774f836ac54ac4aa72d15b32d5c3fd4"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.551217 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" event={"ID":"4d68836a-462d-4364-bc12-b530a7cb0727","Type":"ContainerStarted","Data":"a1777e0932f0f4978bc8031151c92b7de2ed32c142a5510fa86487be66a38f79"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.551299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6r5ns" event={"ID":"4d68836a-462d-4364-bc12-b530a7cb0727","Type":"ContainerStarted","Data":"890c5b660ec27d52e3631606bb7266604b85458b91af39bedb1b24fce9bc9cbf"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.552980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d7a3ed08-9759-41ad-8af6-0037be5c7f44","Type":"ContainerStarted","Data":"ea3dbefe860c8112e6d02bdb5a99edcc3e6b99446d3148581f6697c865ca52e8"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.553073 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d7a3ed08-9759-41ad-8af6-0037be5c7f44","Type":"ContainerStarted","Data":"d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be"} Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 
19:17:07.562595 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=9.562580129 podStartE2EDuration="9.562580129s" podCreationTimestamp="2025-12-05 19:16:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:17:07.560029354 +0000 UTC m=+206.441915349" watchObservedRunningTime="2025-12-05 19:17:07.562580129 +0000 UTC m=+206.444466124" Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.596346 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-6r5ns" podStartSLOduration=178.596327562 podStartE2EDuration="2m58.596327562s" podCreationTimestamp="2025-12-05 19:14:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:17:07.580631541 +0000 UTC m=+206.462517556" watchObservedRunningTime="2025-12-05 19:17:07.596327562 +0000 UTC m=+206.478213557" Dec 05 19:17:07 crc kubenswrapper[4982]: I1205 19:17:07.596999 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.596991439 podStartE2EDuration="4.596991439s" podCreationTimestamp="2025-12-05 19:17:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:17:07.593585672 +0000 UTC m=+206.475471677" watchObservedRunningTime="2025-12-05 19:17:07.596991439 +0000 UTC m=+206.478877444" Dec 05 19:17:08 crc kubenswrapper[4982]: I1205 19:17:08.559283 4982 generic.go:334] "Generic (PLEG): container finished" podID="bc5284f4-8c16-4b83-abb1-3ff841844fbb" containerID="67e8898c26806e7c46e2e981722a885a7a70ea253b940b6a508c830f44b079e5" exitCode=0 Dec 05 19:17:08 crc kubenswrapper[4982]: I1205 19:17:08.559947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc5284f4-8c16-4b83-abb1-3ff841844fbb","Type":"ContainerDied","Data":"67e8898c26806e7c46e2e981722a885a7a70ea253b940b6a508c830f44b079e5"} Dec 05 19:17:09 crc kubenswrapper[4982]: I1205 19:17:09.817370 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:17:09 crc kubenswrapper[4982]: I1205 19:17:09.923840 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access\") pod \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " Dec 05 19:17:09 crc kubenswrapper[4982]: I1205 19:17:09.923973 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir\") pod \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\" (UID: \"bc5284f4-8c16-4b83-abb1-3ff841844fbb\") " Dec 05 19:17:09 crc kubenswrapper[4982]: I1205 19:17:09.924213 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bc5284f4-8c16-4b83-abb1-3ff841844fbb" (UID: "bc5284f4-8c16-4b83-abb1-3ff841844fbb"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:09 crc kubenswrapper[4982]: I1205 19:17:09.930446 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bc5284f4-8c16-4b83-abb1-3ff841844fbb" (UID: "bc5284f4-8c16-4b83-abb1-3ff841844fbb"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:17:10 crc kubenswrapper[4982]: I1205 19:17:10.025702 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:10 crc kubenswrapper[4982]: I1205 19:17:10.025759 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bc5284f4-8c16-4b83-abb1-3ff841844fbb-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:10 crc kubenswrapper[4982]: I1205 19:17:10.574196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"bc5284f4-8c16-4b83-abb1-3ff841844fbb","Type":"ContainerDied","Data":"c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be"} Dec 05 19:17:10 crc kubenswrapper[4982]: I1205 19:17:10.574532 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c01d1de29188533b0f327f01c528dea857fee7788873a459b5f051d67fb8e8be" Dec 05 19:17:10 crc kubenswrapper[4982]: I1205 19:17:10.574247 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 19:17:12 crc kubenswrapper[4982]: I1205 19:17:12.557258 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:17:12 crc kubenswrapper[4982]: I1205 19:17:12.557643 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:17:12 crc kubenswrapper[4982]: I1205 19:17:12.557692 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:17:12 crc kubenswrapper[4982]: I1205 19:17:12.558401 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:17:12 crc kubenswrapper[4982]: I1205 19:17:12.558513 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2" gracePeriod=600 Dec 
05 19:17:13 crc kubenswrapper[4982]: I1205 19:17:13.596073 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2" exitCode=0 Dec 05 19:17:13 crc kubenswrapper[4982]: I1205 19:17:13.596127 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2"} Dec 05 19:17:14 crc kubenswrapper[4982]: I1205 19:17:14.603484 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerStarted","Data":"caab3dc6cf348f0a491612c6d71fc1d36f61b185451cf186823f4d56b36d9f45"} Dec 05 19:17:14 crc kubenswrapper[4982]: I1205 19:17:14.607297 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4"} Dec 05 19:17:15 crc kubenswrapper[4982]: I1205 19:17:15.613166 4982 generic.go:334] "Generic (PLEG): container finished" podID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerID="caab3dc6cf348f0a491612c6d71fc1d36f61b185451cf186823f4d56b36d9f45" exitCode=0 Dec 05 19:17:15 crc kubenswrapper[4982]: I1205 19:17:15.613204 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerDied","Data":"caab3dc6cf348f0a491612c6d71fc1d36f61b185451cf186823f4d56b36d9f45"} Dec 05 19:17:16 crc kubenswrapper[4982]: I1205 19:17:16.620055 4982 generic.go:334] "Generic (PLEG): container finished" podID="4718733b-932d-413f-9b5c-3c8a773df710" containerID="5aefd43417f9d9adea4bae5ba678e6cafd8a723bfdcd89c0685f3112e4e06bbd" exitCode=0 Dec 05 19:17:16 crc kubenswrapper[4982]: I1205 19:17:16.620172 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerDied","Data":"5aefd43417f9d9adea4bae5ba678e6cafd8a723bfdcd89c0685f3112e4e06bbd"} Dec 05 19:17:16 crc kubenswrapper[4982]: I1205 19:17:16.622731 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerStarted","Data":"4d9393563bf6bcf82c847612352d75f11a292c1c43c8651253d20bdec9211c19"} Dec 05 19:17:16 crc kubenswrapper[4982]: I1205 19:17:16.653102 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hbvk8" podStartSLOduration=1.777878661 podStartE2EDuration="53.653079842s" podCreationTimestamp="2025-12-05 19:16:23 +0000 UTC" firstStartedPulling="2025-12-05 19:16:24.28908994 +0000 UTC m=+163.170975935" lastFinishedPulling="2025-12-05 19:17:16.164291121 +0000 UTC m=+215.046177116" observedRunningTime="2025-12-05 19:17:16.651695487 +0000 UTC m=+215.533581482" watchObservedRunningTime="2025-12-05 19:17:16.653079842 +0000 UTC m=+215.534965877" Dec 05 19:17:18 crc kubenswrapper[4982]: I1205 19:17:18.636125 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" 
event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerStarted","Data":"00033f8d46e76911d11888b7f94d3ab7911179aa07c464e83b43ea291302eef4"} Dec 05 19:17:18 crc kubenswrapper[4982]: I1205 19:17:18.638895 4982 generic.go:334] "Generic (PLEG): container finished" podID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerID="0e1f684e6670839e09cd9ddc02df45270e03f8c6963d41a287ae5758cb7c1c62" exitCode=0 Dec 05 19:17:18 crc kubenswrapper[4982]: I1205 19:17:18.638940 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerDied","Data":"0e1f684e6670839e09cd9ddc02df45270e03f8c6963d41a287ae5758cb7c1c62"} Dec 05 19:17:18 crc kubenswrapper[4982]: I1205 19:17:18.657463 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6k2jq" podStartSLOduration=3.176787033 podStartE2EDuration="59.657441174s" podCreationTimestamp="2025-12-05 19:16:19 +0000 UTC" firstStartedPulling="2025-12-05 19:16:21.160909381 +0000 UTC m=+160.042795376" lastFinishedPulling="2025-12-05 19:17:17.641563522 +0000 UTC m=+216.523449517" observedRunningTime="2025-12-05 19:17:18.656976662 +0000 UTC m=+217.538862657" watchObservedRunningTime="2025-12-05 19:17:18.657441174 +0000 UTC m=+217.539327179" Dec 05 19:17:19 crc kubenswrapper[4982]: I1205 19:17:19.651461 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerStarted","Data":"a2589f3bc5208794a8172b146fb0680db5362deceb063d82adbed0987b14cfff"} Dec 05 19:17:19 crc kubenswrapper[4982]: I1205 19:17:19.668211 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jnr8b" podStartSLOduration=2.475551575 podStartE2EDuration="1m0.668191594s" podCreationTimestamp="2025-12-05 19:16:19 +0000 UTC" firstStartedPulling="2025-12-05 19:16:21.138850064 +0000 UTC m=+160.020736059" lastFinishedPulling="2025-12-05 19:17:19.331490083 +0000 UTC m=+218.213376078" observedRunningTime="2025-12-05 19:17:19.666327157 +0000 UTC m=+218.548213152" watchObservedRunningTime="2025-12-05 19:17:19.668191594 +0000 UTC m=+218.550077589" Dec 05 19:17:19 crc kubenswrapper[4982]: I1205 19:17:19.841389 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:17:19 crc kubenswrapper[4982]: I1205 19:17:19.841479 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:17:20 crc kubenswrapper[4982]: I1205 19:17:20.255820 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:17:20 crc kubenswrapper[4982]: I1205 19:17:20.263560 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:20 crc kubenswrapper[4982]: I1205 19:17:20.263613 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:20 crc kubenswrapper[4982]: I1205 19:17:20.658715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" 
event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerStarted","Data":"a1d681d6be5132121be6bfa3055a44a3d39fd49d4ad1ccdde31e86265cff01bf"} Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.302370 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-jnr8b" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="registry-server" probeResult="failure" output=< Dec 05 19:17:21 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:17:21 crc kubenswrapper[4982]: > Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.665817 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9954db1-336a-4478-869c-080166403adb" containerID="018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903" exitCode=0 Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.665875 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerDied","Data":"018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903"} Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.668989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerStarted","Data":"f9b4d5a94053b6a48c9297dadd89c062f33f9e9797a5180fe156192e6f9ff40c"} Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.670458 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2800739-c6ca-495c-a39b-b619242e6867" containerID="2bbe9551a0d207b6ae2bdc5560028c186d4d64b63562c9988a4790654328c080" exitCode=0 Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.670523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerDied","Data":"2bbe9551a0d207b6ae2bdc5560028c186d4d64b63562c9988a4790654328c080"} Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.672747 4982 generic.go:334] "Generic (PLEG): container finished" podID="10ea7812-4e92-4291-9929-636eccbae790" containerID="a1d681d6be5132121be6bfa3055a44a3d39fd49d4ad1ccdde31e86265cff01bf" exitCode=0 Dec 05 19:17:21 crc kubenswrapper[4982]: I1205 19:17:21.672798 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerDied","Data":"a1d681d6be5132121be6bfa3055a44a3d39fd49d4ad1ccdde31e86265cff01bf"} Dec 05 19:17:22 crc kubenswrapper[4982]: I1205 19:17:22.689499 4982 generic.go:334] "Generic (PLEG): container finished" podID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerID="f9b4d5a94053b6a48c9297dadd89c062f33f9e9797a5180fe156192e6f9ff40c" exitCode=0 Dec 05 19:17:22 crc kubenswrapper[4982]: I1205 19:17:22.689546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerDied","Data":"f9b4d5a94053b6a48c9297dadd89c062f33f9e9797a5180fe156192e6f9ff40c"} Dec 05 19:17:23 crc kubenswrapper[4982]: I1205 19:17:23.459358 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:23 crc kubenswrapper[4982]: I1205 19:17:23.459709 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:23 crc kubenswrapper[4982]: I1205 19:17:23.512764 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:23 crc kubenswrapper[4982]: I1205 19:17:23.762478 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:25 crc kubenswrapper[4982]: I1205 19:17:25.635533 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:17:26 crc kubenswrapper[4982]: I1205 19:17:26.712563 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hbvk8" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="registry-server" containerID="cri-o://4d9393563bf6bcf82c847612352d75f11a292c1c43c8651253d20bdec9211c19" gracePeriod=2 Dec 05 19:17:29 crc kubenswrapper[4982]: I1205 19:17:29.733768 4982 generic.go:334] "Generic (PLEG): container finished" podID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerID="4d9393563bf6bcf82c847612352d75f11a292c1c43c8651253d20bdec9211c19" exitCode=0 Dec 05 19:17:29 crc kubenswrapper[4982]: I1205 19:17:29.733854 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerDied","Data":"4d9393563bf6bcf82c847612352d75f11a292c1c43c8651253d20bdec9211c19"} Dec 05 19:17:29 crc kubenswrapper[4982]: I1205 19:17:29.911401 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:17:30 crc kubenswrapper[4982]: I1205 19:17:30.332853 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:30 crc kubenswrapper[4982]: I1205 19:17:30.400083 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:31 crc kubenswrapper[4982]: I1205 19:17:31.235105 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"] Dec 05 19:17:31 crc kubenswrapper[4982]: I1205 19:17:31.743761 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jnr8b" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="registry-server" containerID="cri-o://a2589f3bc5208794a8172b146fb0680db5362deceb063d82adbed0987b14cfff" gracePeriod=2 Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.753269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hbvk8" event={"ID":"33cc7f40-54f9-4057-9291-7d66e4f6ab18","Type":"ContainerDied","Data":"0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19"} Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.753331 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f2f0ef1aa7ce7072820601bf5207eb96af57825a9f2d2ee4a4af5fd9092db19" Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.780696 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.936621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities\") pod \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.936735 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content\") pod \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.936802 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-shbzf\" (UniqueName: \"kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf\") pod \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\" (UID: \"33cc7f40-54f9-4057-9291-7d66e4f6ab18\") " Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.938704 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities" (OuterVolumeSpecName: "utilities") pod "33cc7f40-54f9-4057-9291-7d66e4f6ab18" (UID: "33cc7f40-54f9-4057-9291-7d66e4f6ab18"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:17:32 crc kubenswrapper[4982]: I1205 19:17:32.943141 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf" (OuterVolumeSpecName: "kube-api-access-shbzf") pod "33cc7f40-54f9-4057-9291-7d66e4f6ab18" (UID: "33cc7f40-54f9-4057-9291-7d66e4f6ab18"). InnerVolumeSpecName "kube-api-access-shbzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.038238 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-shbzf\" (UniqueName: \"kubernetes.io/projected/33cc7f40-54f9-4057-9291-7d66e4f6ab18-kube-api-access-shbzf\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.038291 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.090384 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "33cc7f40-54f9-4057-9291-7d66e4f6ab18" (UID: "33cc7f40-54f9-4057-9291-7d66e4f6ab18"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.139233 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/33cc7f40-54f9-4057-9291-7d66e4f6ab18-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.762423 4982 generic.go:334] "Generic (PLEG): container finished" podID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerID="a2589f3bc5208794a8172b146fb0680db5362deceb063d82adbed0987b14cfff" exitCode=0 Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.762625 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hbvk8" Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.762642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerDied","Data":"a2589f3bc5208794a8172b146fb0680db5362deceb063d82adbed0987b14cfff"} Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.781471 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:17:33 crc kubenswrapper[4982]: I1205 19:17:33.787677 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hbvk8"] Dec 05 19:17:35 crc kubenswrapper[4982]: I1205 19:17:35.398904 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" path="/var/lib/kubelet/pods/33cc7f40-54f9-4057-9291-7d66e4f6ab18/volumes" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.462941 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.589283 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5snm\" (UniqueName: \"kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm\") pod \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.589413 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities\") pod \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.589512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content\") pod \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\" (UID: \"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726\") " Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.593057 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities" (OuterVolumeSpecName: "utilities") pod "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" (UID: "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.595323 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm" (OuterVolumeSpecName: "kube-api-access-b5snm") pod "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" (UID: "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726"). InnerVolumeSpecName "kube-api-access-b5snm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.639087 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" (UID: "c63ba14d-0cb7-4a7f-9137-2c0f88ef6726"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.690944 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.691091 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.691187 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5snm\" (UniqueName: \"kubernetes.io/projected/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726-kube-api-access-b5snm\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.785035 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerStarted","Data":"9d37e0b5186e5e7f9ab770337506c93b4ffc0d7d29da9e8b71317480d69c7b8c"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.787681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerStarted","Data":"580fd60b8f07f6fe91fcfe89fdb182cf3145691c2a274a8dec231f0c90a25bbf"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.789760 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerStarted","Data":"895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.792389 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jnr8b" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.792388 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jnr8b" event={"ID":"c63ba14d-0cb7-4a7f-9137-2c0f88ef6726","Type":"ContainerDied","Data":"e61b2eaf43957ba80224652f4ac179febf4be103255b5393d2898b08e1d39eb3"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.792494 4982 scope.go:117] "RemoveContainer" containerID="a2589f3bc5208794a8172b146fb0680db5362deceb063d82adbed0987b14cfff" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.818221 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bhlmf" podStartSLOduration=2.558001528 podStartE2EDuration="1m17.81818041s" podCreationTimestamp="2025-12-05 19:16:19 +0000 UTC" firstStartedPulling="2025-12-05 19:16:21.149282974 +0000 UTC m=+160.031168969" lastFinishedPulling="2025-12-05 19:17:36.409461826 +0000 UTC m=+235.291347851" observedRunningTime="2025-12-05 19:17:36.814840554 +0000 UTC m=+235.696726549" watchObservedRunningTime="2025-12-05 19:17:36.81818041 +0000 UTC m=+235.700066405" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.818542 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerStarted","Data":"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.823317 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerStarted","Data":"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5"} Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.829434 4982 scope.go:117] "RemoveContainer" containerID="0e1f684e6670839e09cd9ddc02df45270e03f8c6963d41a287ae5758cb7c1c62" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.837450 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-9cd5r" podStartSLOduration=3.723989606 podStartE2EDuration="1m15.837432182s" podCreationTimestamp="2025-12-05 19:16:21 +0000 UTC" firstStartedPulling="2025-12-05 19:16:24.2838823 +0000 UTC m=+163.165768295" lastFinishedPulling="2025-12-05 19:17:36.397324836 +0000 UTC m=+235.279210871" observedRunningTime="2025-12-05 19:17:36.835167374 +0000 UTC m=+235.717053389" watchObservedRunningTime="2025-12-05 19:17:36.837432182 +0000 UTC m=+235.719318187" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.847172 4982 scope.go:117] "RemoveContainer" containerID="40f5f940257980b49dfab7974efde9c6a0697ba5331cf7b260c651eddb331e64" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.855585 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vpkcb" podStartSLOduration=2.707751703 podStartE2EDuration="1m14.855561616s" podCreationTimestamp="2025-12-05 19:16:22 +0000 UTC" firstStartedPulling="2025-12-05 19:16:24.283043041 +0000 UTC m=+163.164929036" lastFinishedPulling="2025-12-05 19:17:36.430852944 +0000 UTC m=+235.312738949" observedRunningTime="2025-12-05 19:17:36.851855751 +0000 UTC m=+235.733741766" watchObservedRunningTime="2025-12-05 19:17:36.855561616 +0000 UTC m=+235.737447621" Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.884787 4982 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"] Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.888616 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jnr8b"] Dec 05 19:17:36 crc kubenswrapper[4982]: I1205 19:17:36.911106 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lhxlk" podStartSLOduration=6.468843654 podStartE2EDuration="1m14.911092326s" podCreationTimestamp="2025-12-05 19:16:22 +0000 UTC" firstStartedPulling="2025-12-05 19:16:24.294482474 +0000 UTC m=+163.176368469" lastFinishedPulling="2025-12-05 19:17:32.736731106 +0000 UTC m=+231.618617141" observedRunningTime="2025-12-05 19:17:36.909244219 +0000 UTC m=+235.791130214" watchObservedRunningTime="2025-12-05 19:17:36.911092326 +0000 UTC m=+235.792978321" Dec 05 19:17:37 crc kubenswrapper[4982]: I1205 19:17:37.397006 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" path="/var/lib/kubelet/pods/c63ba14d-0cb7-4a7f-9137-2c0f88ef6726/volumes" Dec 05 19:17:37 crc kubenswrapper[4982]: I1205 19:17:37.831937 4982 generic.go:334] "Generic (PLEG): container finished" podID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerID="806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a" exitCode=0 Dec 05 19:17:37 crc kubenswrapper[4982]: I1205 19:17:37.831984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerDied","Data":"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a"} Dec 05 19:17:38 crc kubenswrapper[4982]: I1205 19:17:38.841306 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerStarted","Data":"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24"} Dec 05 19:17:38 crc kubenswrapper[4982]: I1205 19:17:38.865635 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dzwxm" podStartSLOduration=1.717024909 podStartE2EDuration="1m18.865612533s" podCreationTimestamp="2025-12-05 19:16:20 +0000 UTC" firstStartedPulling="2025-12-05 19:16:21.147320149 +0000 UTC m=+160.029206144" lastFinishedPulling="2025-12-05 19:17:38.295907773 +0000 UTC m=+237.177793768" observedRunningTime="2025-12-05 19:17:38.86079014 +0000 UTC m=+237.742676155" watchObservedRunningTime="2025-12-05 19:17:38.865612533 +0000 UTC m=+237.747498548" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.036223 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.036282 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.076070 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.561744 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.561822 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:17:40 crc kubenswrapper[4982]: I1205 19:17:40.617121 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.025979 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.026317 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.083944 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.456020 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.457203 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.542501 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9z6x6"] Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.553333 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.899641 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:17:42 crc kubenswrapper[4982]: I1205 19:17:42.901242 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:17:43 crc kubenswrapper[4982]: I1205 19:17:43.050059 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:17:43 crc kubenswrapper[4982]: I1205 19:17:43.050193 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:17:43 crc kubenswrapper[4982]: I1205 19:17:43.097957 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:17:43 crc kubenswrapper[4982]: I1205 19:17:43.935877 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656345 4982 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656672 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656693 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656714 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="extract-utilities" Dec 05 19:17:44 crc kubenswrapper[4982]: 
I1205 19:17:44.656730 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="extract-utilities" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656750 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656765 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656789 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="extract-content" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656804 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="extract-content" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656833 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc5284f4-8c16-4b83-abb1-3ff841844fbb" containerName="pruner" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656849 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc5284f4-8c16-4b83-abb1-3ff841844fbb" containerName="pruner" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656879 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="extract-content" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656894 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="extract-content" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.656916 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="extract-utilities" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.656933 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="extract-utilities" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.657206 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="33cc7f40-54f9-4057-9291-7d66e4f6ab18" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.657242 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc5284f4-8c16-4b83-abb1-3ff841844fbb" containerName="pruner" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.657284 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c63ba14d-0cb7-4a7f-9137-2c0f88ef6726" containerName="registry-server" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.657756 4982 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.658205 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34" gracePeriod=15 Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.658440 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.659045 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111" gracePeriod=15 Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.659132 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953" gracePeriod=15 Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.659229 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58" gracePeriod=15 Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.659284 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b" gracePeriod=15 Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662291 4982 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662573 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662599 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662630 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662649 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662665 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662682 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662701 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662718 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662742 4982 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662758 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662795 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662814 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 19:17:44 crc kubenswrapper[4982]: E1205 19:17:44.662838 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.662854 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663076 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663113 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663140 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663213 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663234 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.663248 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697570 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " 
pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697634 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697672 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697711 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697933 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.697971 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799260 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799397 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799421 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799465 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799489 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799509 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799585 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799655 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799680 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799707 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 
crc kubenswrapper[4982]: I1205 19:17:44.799732 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799757 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:44 crc kubenswrapper[4982]: I1205 19:17:44.799782 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.892785 4982 generic.go:334] "Generic (PLEG): container finished" podID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" containerID="ea3dbefe860c8112e6d02bdb5a99edcc3e6b99446d3148581f6697c865ca52e8" exitCode=0 Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.892830 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d7a3ed08-9759-41ad-8af6-0037be5c7f44","Type":"ContainerDied","Data":"ea3dbefe860c8112e6d02bdb5a99edcc3e6b99446d3148581f6697c865ca52e8"} Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.894464 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.896702 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.898437 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.899369 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111" exitCode=0 Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.899401 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953" exitCode=0 Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.899410 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58" exitCode=0 Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.899420 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b" exitCode=2 Dec 05 19:17:46 crc kubenswrapper[4982]: I1205 19:17:46.899493 4982 scope.go:117] "RemoveContainer" containerID="881b00d786dee4e433a5e6ce402ca9fdc9c38ef3a24e8db6a56e7b9a7e730646" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.558331 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.559833 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.560614 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.561192 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.620204 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.620619 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.621061 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.622248 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.623583 4982 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.623629 4982 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.623992 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="200ms" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.638981 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639056 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639300 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639310 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639329 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639804 4982 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639848 4982 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.639878 4982 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:47 crc kubenswrapper[4982]: E1205 19:17:47.828727 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="400ms" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.908205 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.909180 4982 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34" exitCode=0 Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.909313 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.909404 4982 scope.go:117] "RemoveContainer" containerID="fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.926094 4982 scope.go:117] "RemoveContainer" containerID="3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.927092 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.927750 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.946545 4982 scope.go:117] "RemoveContainer" containerID="7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.964698 4982 scope.go:117] "RemoveContainer" containerID="90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b" Dec 05 19:17:47 crc kubenswrapper[4982]: I1205 19:17:47.989962 4982 scope.go:117] "RemoveContainer" containerID="88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 
19:17:48.020045 4982 scope.go:117] "RemoveContainer" containerID="88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.040809 4982 scope.go:117] "RemoveContainer" containerID="fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.041258 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\": container with ID starting with fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111 not found: ID does not exist" containerID="fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041408 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111"} err="failed to get container status \"fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\": rpc error: code = NotFound desc = could not find container \"fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111\": container with ID starting with fb08189b098d945eb200472da7300f1503f4b116e1a6b98f98090bf9abfe7111 not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041434 4982 scope.go:117] "RemoveContainer" containerID="3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.041678 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\": container with ID starting with 3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953 not found: ID does not exist" containerID="3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041699 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953"} err="failed to get container status \"3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\": rpc error: code = NotFound desc = could not find container \"3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953\": container with ID starting with 3559cf43fba88f4e74d9e36d86e0e92f6341b79f1a16badb4b8d456b781e4953 not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041712 4982 scope.go:117] "RemoveContainer" containerID="7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.041892 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\": container with ID starting with 7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58 not found: ID does not exist" containerID="7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041909 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58"} err="failed to get container status 
\"7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\": rpc error: code = NotFound desc = could not find container \"7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58\": container with ID starting with 7f02ac5ca7eb43dd89176985cce5f7fa6f3eef459fb1c7862e11b4f6fa908f58 not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.041921 4982 scope.go:117] "RemoveContainer" containerID="90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.042126 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\": container with ID starting with 90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b not found: ID does not exist" containerID="90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.042141 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b"} err="failed to get container status \"90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\": rpc error: code = NotFound desc = could not find container \"90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b\": container with ID starting with 90f2bc7cd4d2d2eb5355d6c36d40ea6836cbf3246b79a3cd5df4025343b6d40b not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.042173 4982 scope.go:117] "RemoveContainer" containerID="88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.042655 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\": container with ID starting with 88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34 not found: ID does not exist" containerID="88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.042675 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34"} err="failed to get container status \"88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\": rpc error: code = NotFound desc = could not find container \"88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34\": container with ID starting with 88c46becc72551e1ab0f37c124406e0e52cdaf9beb7c5af3432468995a214e34 not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.043022 4982 scope.go:117] "RemoveContainer" containerID="88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.046996 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\": container with ID starting with 88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d not found: ID does not exist" containerID="88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.047033 4982 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d"} err="failed to get container status \"88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\": rpc error: code = NotFound desc = could not find container \"88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d\": container with ID starting with 88359cd1d6c5a2b98eddd28329ea27d9efa1ed3dec28dc0804aa9b8f4a89506d not found: ID does not exist" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.171997 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.172965 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.173574 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:48 crc kubenswrapper[4982]: E1205 19:17:48.229626 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="800ms" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.246196 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access\") pod \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.246537 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir\") pod \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.246859 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock\") pod \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\" (UID: \"d7a3ed08-9759-41ad-8af6-0037be5c7f44\") " Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.246652 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d7a3ed08-9759-41ad-8af6-0037be5c7f44" (UID: "d7a3ed08-9759-41ad-8af6-0037be5c7f44"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.246893 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock" (OuterVolumeSpecName: "var-lock") pod "d7a3ed08-9759-41ad-8af6-0037be5c7f44" (UID: "d7a3ed08-9759-41ad-8af6-0037be5c7f44"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.247529 4982 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.247625 4982 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/d7a3ed08-9759-41ad-8af6-0037be5c7f44-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.253595 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d7a3ed08-9759-41ad-8af6-0037be5c7f44" (UID: "d7a3ed08-9759-41ad-8af6-0037be5c7f44"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.349527 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d7a3ed08-9759-41ad-8af6-0037be5c7f44-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.920348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"d7a3ed08-9759-41ad-8af6-0037be5c7f44","Type":"ContainerDied","Data":"d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be"} Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.920408 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d236ca7437a3f898f196887a88345a5a52419b89b2897aabcae50cb0432db4be" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.920503 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.946824 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:48 crc kubenswrapper[4982]: I1205 19:17:48.947330 4982 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:49 crc kubenswrapper[4982]: E1205 19:17:49.030496 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="1.6s" Dec 05 19:17:49 crc kubenswrapper[4982]: I1205 19:17:49.400373 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 19:17:49 crc kubenswrapper[4982]: E1205 19:17:49.699198 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.94:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:49 crc kubenswrapper[4982]: I1205 19:17:49.700213 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:49 crc kubenswrapper[4982]: W1205 19:17:49.722787 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-2016aef522f2522d124ba6dc7d0122d1417e38163aa0d8fe5e60318fe30e9a26 WatchSource:0}: Error finding container 2016aef522f2522d124ba6dc7d0122d1417e38163aa0d8fe5e60318fe30e9a26: Status 404 returned error can't find the container with id 2016aef522f2522d124ba6dc7d0122d1417e38163aa0d8fe5e60318fe30e9a26 Dec 05 19:17:49 crc kubenswrapper[4982]: E1205 19:17:49.726272 4982 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.94:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e67d548f61763 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 19:17:49.725595491 +0000 UTC m=+248.607481516,LastTimestamp:2025-12-05 19:17:49.725595491 +0000 UTC m=+248.607481516,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 19:17:49 crc kubenswrapper[4982]: I1205 19:17:49.926441 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2016aef522f2522d124ba6dc7d0122d1417e38163aa0d8fe5e60318fe30e9a26"} Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.089881 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.090511 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.090747 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.630126 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.630445 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.630762 4982 status_manager.go:851] "Failed to get status for pod" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" pod="openshift-marketplace/community-operators-dzwxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dzwxm\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: E1205 19:17:50.631242 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="3.2s" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.631244 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.936072 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b85048a00a069ca23572a81a47f00737f9616bd6272d359d31e5886632a2f494"} Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.936991 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: E1205 19:17:50.937137 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.94:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.937323 4982 status_manager.go:851] "Failed to get status for pod" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" pod="openshift-marketplace/community-operators-dzwxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dzwxm\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:50 crc kubenswrapper[4982]: I1205 19:17:50.937651 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:51 crc kubenswrapper[4982]: I1205 19:17:51.392327 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:51 crc 
kubenswrapper[4982]: I1205 19:17:51.392809 4982 status_manager.go:851] "Failed to get status for pod" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" pod="openshift-marketplace/community-operators-dzwxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dzwxm\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:51 crc kubenswrapper[4982]: I1205 19:17:51.393318 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:51 crc kubenswrapper[4982]: E1205 19:17:51.945182 4982 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.94:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:17:53 crc kubenswrapper[4982]: E1205 19:17:53.750108 4982 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.94:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e67d548f61763 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 19:17:49.725595491 +0000 UTC m=+248.607481516,LastTimestamp:2025-12-05 19:17:49.725595491 +0000 UTC m=+248.607481516,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 19:17:53 crc kubenswrapper[4982]: E1205 19:17:53.832274 4982 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.94:6443: connect: connection refused" interval="6.4s" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.389611 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.390991 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.391411 4982 status_manager.go:851] "Failed to get status for pod" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" pod="openshift-marketplace/community-operators-dzwxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dzwxm\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.391626 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.404615 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.404655 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:55 crc kubenswrapper[4982]: E1205 19:17:55.405029 4982 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:55 crc kubenswrapper[4982]: I1205 19:17:55.405620 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:55 crc kubenswrapper[4982]: W1205 19:17:55.427584 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-26855403017dca5582b1a22986c397ddca750df3c25fded99102edd64b00e704 WatchSource:0}: Error finding container 26855403017dca5582b1a22986c397ddca750df3c25fded99102edd64b00e704: Status 404 returned error can't find the container with id 26855403017dca5582b1a22986c397ddca750df3c25fded99102edd64b00e704 Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.004331 4982 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0c12b971e633e3044f419392734d29bfd0d2a5ae24708c35c3522b1fced7b3f0" exitCode=0 Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.004382 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0c12b971e633e3044f419392734d29bfd0d2a5ae24708c35c3522b1fced7b3f0"} Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.004411 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"26855403017dca5582b1a22986c397ddca750df3c25fded99102edd64b00e704"} Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.004709 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.004724 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:56 crc kubenswrapper[4982]: E1205 19:17:56.005464 4982 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.005864 4982 status_manager.go:851] "Failed to get status for pod" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" pod="openshift-marketplace/community-operators-bhlmf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-bhlmf\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.006441 4982 status_manager.go:851] "Failed to get status for pod" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" pod="openshift-marketplace/community-operators-dzwxm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-dzwxm\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:56 crc kubenswrapper[4982]: I1205 19:17:56.006954 4982 status_manager.go:851] "Failed to get status for pod" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.94:6443: connect: connection refused" Dec 05 19:17:57 crc kubenswrapper[4982]: I1205 19:17:57.016554 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bfe392df307fecaeac33543ce6c5df66b2bb56f24fbbc8b3611a35e2a8aa39aa"} Dec 05 19:17:57 crc kubenswrapper[4982]: I1205 19:17:57.017101 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"830b38db01d39aa56986dbbca042824341bd76349a560db3b023226b1dd65685"} Dec 05 19:17:57 crc kubenswrapper[4982]: I1205 19:17:57.017117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f9ea40c7ffffbc5ead095587d0520f14d1bb5bf5fd42ab5b51535d9cbbae7771"} Dec 05 19:17:57 crc kubenswrapper[4982]: I1205 19:17:57.017130 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"22a96e8aad1e525f3ea944711608052ea7f6f930fdf46e124507022842294a43"} Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.077469 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ef868c8e0f1dc4fb07277c179eb440c459c4da39a910ff9b04ee076f48550763"} Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.077569 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.077674 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.077693 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.081648 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.081696 4982 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a" exitCode=1 Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.081722 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a"} Dec 05 19:17:58 crc kubenswrapper[4982]: I1205 19:17:58.082133 4982 scope.go:117] "RemoveContainer" containerID="db6e16ea6df50c54aa3d7bc93b9a5ab0de1418ccdf0c965f027faed6b464d44a" Dec 05 19:17:59 crc kubenswrapper[4982]: I1205 19:17:59.091422 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 19:17:59 crc kubenswrapper[4982]: I1205 19:17:59.091767 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ccfb2237eac3fee3671ebff915cf3cc996c14cfa73c7d7df81e96d6fa82bcb18"} Dec 05 19:18:00 crc kubenswrapper[4982]: I1205 19:18:00.406365 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:00 crc kubenswrapper[4982]: I1205 19:18:00.406423 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:00 crc kubenswrapper[4982]: I1205 19:18:00.414969 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:03 crc kubenswrapper[4982]: I1205 19:18:03.103103 4982 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:03 crc kubenswrapper[4982]: I1205 19:18:03.196197 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ae237657-c99b-4e34-877d-e742310633ff" Dec 05 19:18:03 crc kubenswrapper[4982]: I1205 19:18:03.788398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:18:03 crc kubenswrapper[4982]: I1205 19:18:03.788703 4982 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 19:18:03 crc kubenswrapper[4982]: I1205 19:18:03.788985 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 19:18:04 crc kubenswrapper[4982]: I1205 19:18:04.125900 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:18:04 crc kubenswrapper[4982]: I1205 19:18:04.125953 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:18:04 crc kubenswrapper[4982]: I1205 19:18:04.129793 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ae237657-c99b-4e34-877d-e742310633ff" Dec 05 19:18:04 crc kubenswrapper[4982]: I1205 19:18:04.133573 4982 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://22a96e8aad1e525f3ea944711608052ea7f6f930fdf46e124507022842294a43" Dec 05 19:18:04 crc kubenswrapper[4982]: I1205 19:18:04.133619 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:05 crc kubenswrapper[4982]: I1205 19:18:05.131258 4982 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:18:05 
crc kubenswrapper[4982]: I1205 19:18:05.131300 4982 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="9e316779-c566-4497-b5dc-74bd06c3a798" Dec 05 19:18:05 crc kubenswrapper[4982]: I1205 19:18:05.135431 4982 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="ae237657-c99b-4e34-877d-e742310633ff" Dec 05 19:18:06 crc kubenswrapper[4982]: I1205 19:18:06.083215 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:18:07 crc kubenswrapper[4982]: I1205 19:18:07.581240 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" podUID="a3ef803e-459d-4ade-abe6-e2201b265b09" containerName="oauth-openshift" containerID="cri-o://deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8" gracePeriod=15 Dec 05 19:18:07 crc kubenswrapper[4982]: I1205 19:18:07.944641 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.028691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.028843 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.028898 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.028940 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.029027 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.029102 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: 
\"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.029214 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.029272 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.029369 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030503 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030551 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030588 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030631 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lsr2n\" (UniqueName: \"kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n\") pod \"a3ef803e-459d-4ade-abe6-e2201b265b09\" (UID: \"a3ef803e-459d-4ade-abe6-e2201b265b09\") " Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030896 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.030939 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.031324 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.031439 4982 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.031490 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.031520 4982 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a3ef803e-459d-4ade-abe6-e2201b265b09-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.031776 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.035578 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.036819 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.037492 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.038434 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.038488 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.038759 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.040502 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n" (OuterVolumeSpecName: "kube-api-access-lsr2n") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "kube-api-access-lsr2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.043440 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.043749 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "a3ef803e-459d-4ade-abe6-e2201b265b09" (UID: "a3ef803e-459d-4ade-abe6-e2201b265b09"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133115 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133204 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133237 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133264 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133294 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133319 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133345 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133372 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133396 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133420 4982 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/a3ef803e-459d-4ade-abe6-e2201b265b09-v4-0-config-system-service-ca\") on node \"crc\" 
DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.133445 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lsr2n\" (UniqueName: \"kubernetes.io/projected/a3ef803e-459d-4ade-abe6-e2201b265b09-kube-api-access-lsr2n\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.149939 4982 generic.go:334] "Generic (PLEG): container finished" podID="a3ef803e-459d-4ade-abe6-e2201b265b09" containerID="deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8" exitCode=0 Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.149986 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.149992 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" event={"ID":"a3ef803e-459d-4ade-abe6-e2201b265b09","Type":"ContainerDied","Data":"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8"} Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.150111 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9z6x6" event={"ID":"a3ef803e-459d-4ade-abe6-e2201b265b09","Type":"ContainerDied","Data":"04fa63ea39454b188e4126dfbf434222c154d0c4827601b35102f68bbbfda83e"} Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.150135 4982 scope.go:117] "RemoveContainer" containerID="deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.173262 4982 scope.go:117] "RemoveContainer" containerID="deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8" Dec 05 19:18:08 crc kubenswrapper[4982]: E1205 19:18:08.173757 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8\": container with ID starting with deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8 not found: ID does not exist" containerID="deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8" Dec 05 19:18:08 crc kubenswrapper[4982]: I1205 19:18:08.173794 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8"} err="failed to get container status \"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8\": rpc error: code = NotFound desc = could not find container \"deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8\": container with ID starting with deb99daebf04c0e93c53b67d2e141bb8c7781f3e746cebf52b113be733e74ab8 not found: ID does not exist" Dec 05 19:18:09 crc kubenswrapper[4982]: I1205 19:18:09.412276 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 19:18:10 crc kubenswrapper[4982]: I1205 19:18:10.226659 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 19:18:10 crc kubenswrapper[4982]: I1205 19:18:10.389882 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 19:18:10 crc kubenswrapper[4982]: I1205 19:18:10.778182 4982 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 19:18:10 crc kubenswrapper[4982]: I1205 19:18:10.937669 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 19:18:11 crc kubenswrapper[4982]: I1205 19:18:11.220377 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 19:18:11 crc kubenswrapper[4982]: I1205 19:18:11.297024 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 19:18:11 crc kubenswrapper[4982]: I1205 19:18:11.380307 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 19:18:11 crc kubenswrapper[4982]: I1205 19:18:11.748538 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 19:18:11 crc kubenswrapper[4982]: I1205 19:18:11.980713 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 19:18:12 crc kubenswrapper[4982]: I1205 19:18:12.482596 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 19:18:12 crc kubenswrapper[4982]: I1205 19:18:12.555013 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 19:18:12 crc kubenswrapper[4982]: I1205 19:18:12.612390 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 19:18:12 crc kubenswrapper[4982]: I1205 19:18:12.627451 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 19:18:12 crc kubenswrapper[4982]: I1205 19:18:12.746905 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.099863 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.320064 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.352859 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.569822 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.723366 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.792605 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:18:13 crc kubenswrapper[4982]: I1205 19:18:13.796762 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.289241 4982 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.341129 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.532339 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.652383 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.877904 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.904285 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 19:18:14 crc kubenswrapper[4982]: I1205 19:18:14.937027 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.010611 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.095842 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.116739 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.159839 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.176060 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.176665 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.232415 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.359062 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.364935 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.434986 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.476869 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.478939 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.830923 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 19:18:15 crc kubenswrapper[4982]: I1205 19:18:15.968552 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.058765 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.102516 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.242189 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.242960 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.244585 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.402012 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.671084 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.683759 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 19:18:16 crc kubenswrapper[4982]: I1205 19:18:16.779356 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.289829 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.473726 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.517672 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.697670 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.738870 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 19:18:17 crc kubenswrapper[4982]: I1205 19:18:17.856681 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.029306 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.171080 4982 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.316124 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.389931 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.399261 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.454968 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.702987 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 19:18:18 crc kubenswrapper[4982]: I1205 19:18:18.991287 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.107131 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.133997 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.222853 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.277325 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.345712 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.359456 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.599983 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.625340 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.766722 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.843115 4982 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.887122 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 19:18:19 crc kubenswrapper[4982]: I1205 19:18:19.929914 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.079385 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.182087 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.242880 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.265854 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.347610 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.419317 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.497322 4982 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.568268 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.708260 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.801063 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.839418 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.853576 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.889432 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.937342 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 19:18:20 crc kubenswrapper[4982]: I1205 19:18:20.984631 4982 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.005742 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.008800 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.011255 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.043937 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.174087 4982 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.232885 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.239552 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.292643 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.331247 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.429239 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.535275 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.598042 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.683375 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.739946 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.919420 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 05 19:18:21 crc kubenswrapper[4982]: I1205 19:18:21.944793 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.036174 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.054610 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.140776 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.164463 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.279402 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.316497 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.374903 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.425934 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.499400 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.505561 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.554573 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.573870 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.574080 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.782300 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.837817 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.854344 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Dec 05 19:18:22 crc kubenswrapper[4982]: I1205 19:18:22.978665 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.092581 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.094749 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.223221 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.377127 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.447981 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.478186 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.729591 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.864293 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 05 19:18:23 crc kubenswrapper[4982]: I1205 19:18:23.953946 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.022803 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.056423 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.108473 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.140736 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.258361 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.263247 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.319456 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.403918 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.424569 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.528515 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.552852 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.576368 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.578520 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.593335 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.653412 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.742550 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.768240 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.834124 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 05 19:18:24 crc kubenswrapper[4982]: I1205 19:18:24.908570 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.089732 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.176820 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.186431 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.205383 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.206530 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.226003 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.271975 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.323107 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.337769 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.392716 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.414128 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.437970 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.471998 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.560391 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.585888 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.603446 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.647492 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.787949 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.956708 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 19:18:25 crc kubenswrapper[4982]: I1205 19:18:25.959388 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.132131 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.184316 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.192650 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.232792 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.313861 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.409711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.442296 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.622789 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.626978 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.634027 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.655268 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.704415 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.735852 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.742066 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.790896 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.852135 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 19:18:26 crc kubenswrapper[4982]: I1205 19:18:26.898871 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.062506 
4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.090004 4982 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.109034 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.142918 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.264842 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.300310 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.421831 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.430885 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.598974 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.624327 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.674507 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.675014 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.889188 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.967127 4982 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.972923 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-9z6x6"] Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.973004 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.979085 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.999045 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.99901788 podStartE2EDuration="24.99901788s" podCreationTimestamp="2025-12-05 19:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
Dec 05 19:18:27 crc kubenswrapper[4982]: I1205 19:18:27.999045 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.99901788 podStartE2EDuration="24.99901788s" podCreationTimestamp="2025-12-05 19:18:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:18:27.997587193 +0000 UTC m=+286.879473268" watchObservedRunningTime="2025-12-05 19:18:27.99901788 +0000 UTC m=+286.880903915"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.004568 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.249685 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.339454 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.557526 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.602136 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.765837 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.817369 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"]
Dec 05 19:18:28 crc kubenswrapper[4982]: E1205 19:18:28.817704 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3ef803e-459d-4ade-abe6-e2201b265b09" containerName="oauth-openshift"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.817721 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3ef803e-459d-4ade-abe6-e2201b265b09" containerName="oauth-openshift"
Dec 05 19:18:28 crc kubenswrapper[4982]: E1205 19:18:28.817733 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" containerName="installer"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.817740 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" containerName="installer"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.817862 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3ef803e-459d-4ade-abe6-e2201b265b09" containerName="oauth-openshift"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.817887 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7a3ed08-9759-41ad-8af6-0037be5c7f44" containerName="installer"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.818451 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.822009 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.823192 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.824667 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.824760 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825241 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825394 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825512 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825727 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825728 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.825952 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.826251 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.830165 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.835827 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.841395 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"]
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.862503 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.877520 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916509 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-policies\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916573 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-dir\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916600 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916628 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-serving-cert\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916658 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-session\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916681 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-cliconfig\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916697 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-service-ca\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"
\"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916754 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-login\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916769 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-error\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916802 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-router-certs\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916819 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.916834 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4299t\" (UniqueName: \"kubernetes.io/projected/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-kube-api-access-4299t\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:28 crc kubenswrapper[4982]: I1205 19:18:28.991511 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.017658 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-serving-cert\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.017996 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-session\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018193 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-cliconfig\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018363 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-service-ca\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018659 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-login\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.018988 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-error\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-router-certs\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019550 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4299t\" (UniqueName: \"kubernetes.io/projected/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-kube-api-access-4299t\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019738 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-policies\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-cliconfig\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.019895 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-dir\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.020206 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.020391 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.020353 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-dir\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.021025 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-audit-policies\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.021370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-service-ca\") pod 
\"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.026271 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-error\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.026591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.027589 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.027750 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-user-template-login\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.029211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.029514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-router-certs\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.034861 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-session\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.041743 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-v4-0-config-system-serving-cert\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: 
\"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.057658 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4299t\" (UniqueName: \"kubernetes.io/projected/cdfaef0e-b536-4257-8edd-6dfa5f0d05da-kube-api-access-4299t\") pod \"oauth-openshift-dff7fc6f-5vpsd\" (UID: \"cdfaef0e-b536-4257-8edd-6dfa5f0d05da\") " pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.058251 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.094587 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.146682 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.173066 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.234114 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.261061 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.387545 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.399383 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3ef803e-459d-4ade-abe6-e2201b265b09" path="/var/lib/kubelet/pods/a3ef803e-459d-4ade-abe6-e2201b265b09/volumes" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.426678 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.440243 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-dff7fc6f-5vpsd"] Dec 05 19:18:29 crc kubenswrapper[4982]: W1205 19:18:29.455553 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdfaef0e_b536_4257_8edd_6dfa5f0d05da.slice/crio-270ae2e0254d5f92f3c9c9ca72185d7eae4e41727c2cd261b027c4ccd39a3338 WatchSource:0}: Error finding container 270ae2e0254d5f92f3c9c9ca72185d7eae4e41727c2cd261b027c4ccd39a3338: Status 404 returned error can't find the container with id 270ae2e0254d5f92f3c9c9ca72185d7eae4e41727c2cd261b027c4ccd39a3338 Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.549527 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.776415 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.829846 4982 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"image-import-ca" Dec 05 19:18:29 crc kubenswrapper[4982]: I1205 19:18:29.952857 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.129340 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.208906 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.293197 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-dff7fc6f-5vpsd_cdfaef0e-b536-4257-8edd-6dfa5f0d05da/oauth-openshift/0.log" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.293242 4982 generic.go:334] "Generic (PLEG): container finished" podID="cdfaef0e-b536-4257-8edd-6dfa5f0d05da" containerID="b7cf33c19f1755dc2097f5ddb0a74860f977d41c99ae42bb3dd6af007fba4f4a" exitCode=255 Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.293270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" event={"ID":"cdfaef0e-b536-4257-8edd-6dfa5f0d05da","Type":"ContainerDied","Data":"b7cf33c19f1755dc2097f5ddb0a74860f977d41c99ae42bb3dd6af007fba4f4a"} Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.293295 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" event={"ID":"cdfaef0e-b536-4257-8edd-6dfa5f0d05da","Type":"ContainerStarted","Data":"270ae2e0254d5f92f3c9c9ca72185d7eae4e41727c2cd261b027c4ccd39a3338"} Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.293987 4982 scope.go:117] "RemoveContainer" containerID="b7cf33c19f1755dc2097f5ddb0a74860f977d41c99ae42bb3dd6af007fba4f4a" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.375487 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.421589 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.454788 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.658366 4982 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.667609 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.827936 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.892592 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 19:18:30 crc kubenswrapper[4982]: I1205 19:18:30.986037 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.076978 4982 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.140520 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.304348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-dff7fc6f-5vpsd_cdfaef0e-b536-4257-8edd-6dfa5f0d05da/oauth-openshift/1.log" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.307073 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-dff7fc6f-5vpsd_cdfaef0e-b536-4257-8edd-6dfa5f0d05da/oauth-openshift/0.log" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.307190 4982 generic.go:334] "Generic (PLEG): container finished" podID="cdfaef0e-b536-4257-8edd-6dfa5f0d05da" containerID="66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871" exitCode=255 Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.307242 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" event={"ID":"cdfaef0e-b536-4257-8edd-6dfa5f0d05da","Type":"ContainerDied","Data":"66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871"} Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.307292 4982 scope.go:117] "RemoveContainer" containerID="b7cf33c19f1755dc2097f5ddb0a74860f977d41c99ae42bb3dd6af007fba4f4a" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.308072 4982 scope.go:117] "RemoveContainer" containerID="66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871" Dec 05 19:18:31 crc kubenswrapper[4982]: E1205 19:18:31.308452 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-dff7fc6f-5vpsd_openshift-authentication(cdfaef0e-b536-4257-8edd-6dfa5f0d05da)\"" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" podUID="cdfaef0e-b536-4257-8edd-6dfa5f0d05da" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.530380 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.576832 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 19:18:31 crc kubenswrapper[4982]: I1205 19:18:31.796289 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 19:18:32 crc kubenswrapper[4982]: I1205 19:18:32.318692 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-dff7fc6f-5vpsd_cdfaef0e-b536-4257-8edd-6dfa5f0d05da/oauth-openshift/1.log" Dec 05 19:18:32 crc kubenswrapper[4982]: I1205 19:18:32.319200 4982 scope.go:117] "RemoveContainer" containerID="66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871" Dec 05 19:18:32 crc kubenswrapper[4982]: E1205 19:18:32.319436 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-dff7fc6f-5vpsd_openshift-authentication(cdfaef0e-b536-4257-8edd-6dfa5f0d05da)\"" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" 
podUID="cdfaef0e-b536-4257-8edd-6dfa5f0d05da" Dec 05 19:18:36 crc kubenswrapper[4982]: I1205 19:18:36.859542 4982 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 19:18:36 crc kubenswrapper[4982]: I1205 19:18:36.860267 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://b85048a00a069ca23572a81a47f00737f9616bd6272d359d31e5886632a2f494" gracePeriod=5 Dec 05 19:18:39 crc kubenswrapper[4982]: I1205 19:18:39.174251 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:39 crc kubenswrapper[4982]: I1205 19:18:39.174327 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:39 crc kubenswrapper[4982]: I1205 19:18:39.174969 4982 scope.go:117] "RemoveContainer" containerID="66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871" Dec 05 19:18:39 crc kubenswrapper[4982]: E1205 19:18:39.175211 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oauth-openshift\" with CrashLoopBackOff: \"back-off 10s restarting failed container=oauth-openshift pod=oauth-openshift-dff7fc6f-5vpsd_openshift-authentication(cdfaef0e-b536-4257-8edd-6dfa5f0d05da)\"" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" podUID="cdfaef0e-b536-4257-8edd-6dfa5f0d05da" Dec 05 19:18:41 crc kubenswrapper[4982]: I1205 19:18:41.239282 4982 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.405796 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.406111 4982 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="b85048a00a069ca23572a81a47f00737f9616bd6272d359d31e5886632a2f494" exitCode=137 Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.406201 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2016aef522f2522d124ba6dc7d0122d1417e38163aa0d8fe5e60318fe30e9a26" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.448737 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.448820 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601516 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601586 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601648 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601679 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601697 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601712 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601941 4982 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601979 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.602007 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.601647 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.612526 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.703636 4982 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.703674 4982 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.703686 4982 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:42 crc kubenswrapper[4982]: I1205 19:18:42.703695 4982 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 19:18:43 crc kubenswrapper[4982]: I1205 19:18:43.401392 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 19:18:43 crc kubenswrapper[4982]: I1205 19:18:43.412302 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 19:18:50 crc kubenswrapper[4982]: I1205 19:18:50.391123 4982 scope.go:117] "RemoveContainer" containerID="66e9129614934fe3642a8c3bf1462a84d20dd58d76b9ce327e9427b614f22871" Dec 05 19:18:50 crc kubenswrapper[4982]: I1205 19:18:50.465488 4982 generic.go:334] "Generic (PLEG): container finished" podID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerID="f724160aff553a57e8ffc46d0828689a3ed89dca5d69621ba675eb0c61e33a7d" exitCode=0 Dec 05 19:18:50 crc kubenswrapper[4982]: I1205 19:18:50.465532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerDied","Data":"f724160aff553a57e8ffc46d0828689a3ed89dca5d69621ba675eb0c61e33a7d"} Dec 05 19:18:50 crc kubenswrapper[4982]: I1205 19:18:50.465963 4982 scope.go:117] "RemoveContainer" containerID="f724160aff553a57e8ffc46d0828689a3ed89dca5d69621ba675eb0c61e33a7d" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.472962 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-authentication_oauth-openshift-dff7fc6f-5vpsd_cdfaef0e-b536-4257-8edd-6dfa5f0d05da/oauth-openshift/1.log" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.473305 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" event={"ID":"cdfaef0e-b536-4257-8edd-6dfa5f0d05da","Type":"ContainerStarted","Data":"c8d47357fdcca4f6088b5a77179cfad316b32091a2126d60d5db3919ff601720"} Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.473962 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.476331 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerStarted","Data":"b4fafc70df566d0ff95497164a6c826463887d66fc3e290a7d8db616a5540cda"} Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.477220 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.479308 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.481301 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" Dec 05 19:18:51 crc kubenswrapper[4982]: I1205 19:18:51.508944 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-dff7fc6f-5vpsd" podStartSLOduration=69.508923234 podStartE2EDuration="1m9.508923234s" podCreationTimestamp="2025-12-05 19:17:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:18:51.504217555 +0000 UTC m=+310.386103600" watchObservedRunningTime="2025-12-05 19:18:51.508923234 +0000 UTC m=+310.390809239" Dec 05 19:19:24 crc kubenswrapper[4982]: I1205 19:19:24.731680 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:19:24 crc 
kubenswrapper[4982]: I1205 19:19:24.733555 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" containerName="controller-manager" containerID="cri-o://811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665" gracePeriod=30 Dec 05 19:19:24 crc kubenswrapper[4982]: I1205 19:19:24.833965 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"] Dec 05 19:19:24 crc kubenswrapper[4982]: I1205 19:19:24.834548 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" podUID="4f105472-b420-4bb3-877d-663d96eed1af" containerName="route-controller-manager" containerID="cri-o://f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc" gracePeriod=30 Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.087212 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.120527 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config\") pod \"d832b229-50b0-4f09-a892-eb36e39004fb\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.120572 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca\") pod \"d832b229-50b0-4f09-a892-eb36e39004fb\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.120663 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p8jf6\" (UniqueName: \"kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6\") pod \"d832b229-50b0-4f09-a892-eb36e39004fb\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.120703 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert\") pod \"d832b229-50b0-4f09-a892-eb36e39004fb\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.120743 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles\") pod \"d832b229-50b0-4f09-a892-eb36e39004fb\" (UID: \"d832b229-50b0-4f09-a892-eb36e39004fb\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.121907 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d832b229-50b0-4f09-a892-eb36e39004fb" (UID: "d832b229-50b0-4f09-a892-eb36e39004fb"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.122022 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca" (OuterVolumeSpecName: "client-ca") pod "d832b229-50b0-4f09-a892-eb36e39004fb" (UID: "d832b229-50b0-4f09-a892-eb36e39004fb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.122450 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config" (OuterVolumeSpecName: "config") pod "d832b229-50b0-4f09-a892-eb36e39004fb" (UID: "d832b229-50b0-4f09-a892-eb36e39004fb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.127951 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6" (OuterVolumeSpecName: "kube-api-access-p8jf6") pod "d832b229-50b0-4f09-a892-eb36e39004fb" (UID: "d832b229-50b0-4f09-a892-eb36e39004fb"). InnerVolumeSpecName "kube-api-access-p8jf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.133838 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d832b229-50b0-4f09-a892-eb36e39004fb" (UID: "d832b229-50b0-4f09-a892-eb36e39004fb"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.190470 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222419 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") pod \"4f105472-b420-4bb3-877d-663d96eed1af\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222474 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") pod \"4f105472-b420-4bb3-877d-663d96eed1af\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222507 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") pod \"4f105472-b420-4bb3-877d-663d96eed1af\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222572 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8bhn\" (UniqueName: \"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") pod \"4f105472-b420-4bb3-877d-663d96eed1af\" (UID: \"4f105472-b420-4bb3-877d-663d96eed1af\") " Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222801 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p8jf6\" (UniqueName: \"kubernetes.io/projected/d832b229-50b0-4f09-a892-eb36e39004fb-kube-api-access-p8jf6\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222823 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d832b229-50b0-4f09-a892-eb36e39004fb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222836 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222846 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.222859 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d832b229-50b0-4f09-a892-eb36e39004fb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.223901 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config" (OuterVolumeSpecName: "config") pod "4f105472-b420-4bb3-877d-663d96eed1af" (UID: "4f105472-b420-4bb3-877d-663d96eed1af"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.224100 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca" (OuterVolumeSpecName: "client-ca") pod "4f105472-b420-4bb3-877d-663d96eed1af" (UID: "4f105472-b420-4bb3-877d-663d96eed1af"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.227103 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn" (OuterVolumeSpecName: "kube-api-access-d8bhn") pod "4f105472-b420-4bb3-877d-663d96eed1af" (UID: "4f105472-b420-4bb3-877d-663d96eed1af"). InnerVolumeSpecName "kube-api-access-d8bhn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.227322 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4f105472-b420-4bb3-877d-663d96eed1af" (UID: "4f105472-b420-4bb3-877d-663d96eed1af"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.323795 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.323839 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8bhn\" (UniqueName: \"kubernetes.io/projected/4f105472-b420-4bb3-877d-663d96eed1af-kube-api-access-d8bhn\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.323854 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f105472-b420-4bb3-877d-663d96eed1af-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.323868 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f105472-b420-4bb3-877d-663d96eed1af-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562241 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:25 crc kubenswrapper[4982]: E1205 19:19:25.562500 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562515 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 19:19:25 crc kubenswrapper[4982]: E1205 19:19:25.562528 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" containerName="controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562536 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" containerName="controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: E1205 19:19:25.562547 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4f105472-b420-4bb3-877d-663d96eed1af" containerName="route-controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562555 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f105472-b420-4bb3-877d-663d96eed1af" containerName="route-controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562669 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562691 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f105472-b420-4bb3-877d-663d96eed1af" containerName="route-controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.562705 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" containerName="controller-manager" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.563129 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.582810 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.628089 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.628181 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.628245 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psc8q\" (UniqueName: \"kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.628368 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.628440 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 
19:19:25.645179 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.645894 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.654509 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.691249 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f105472-b420-4bb3-877d-663d96eed1af" containerID="f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc" exitCode=0 Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.691307 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.691320 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" event={"ID":"4f105472-b420-4bb3-877d-663d96eed1af","Type":"ContainerDied","Data":"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc"} Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.691349 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp" event={"ID":"4f105472-b420-4bb3-877d-663d96eed1af","Type":"ContainerDied","Data":"3ca10dd8a85b3b1324c89537765a128ba83445d6da2cead6e7c49f6e71eda6cc"} Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.691368 4982 scope.go:117] "RemoveContainer" containerID="f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.693608 4982 generic.go:334] "Generic (PLEG): container finished" podID="d832b229-50b0-4f09-a892-eb36e39004fb" containerID="811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665" exitCode=0 Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.693639 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" event={"ID":"d832b229-50b0-4f09-a892-eb36e39004fb","Type":"ContainerDied","Data":"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665"} Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.693656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" event={"ID":"d832b229-50b0-4f09-a892-eb36e39004fb","Type":"ContainerDied","Data":"841f1e3b295b64f1c5deae6418b871b92026ccf6e622045674d30be414e2b7f4"} Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.693692 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-bzhst" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.711016 4982 scope.go:117] "RemoveContainer" containerID="f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc" Dec 05 19:19:25 crc kubenswrapper[4982]: E1205 19:19:25.711454 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc\": container with ID starting with f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc not found: ID does not exist" containerID="f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.711477 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc"} err="failed to get container status \"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc\": rpc error: code = NotFound desc = could not find container \"f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc\": container with ID starting with f177d33a24d9840313a4cb5279fda544781c46a13f4cf27ca3437377165925dc not found: ID does not exist" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.711494 4982 scope.go:117] "RemoveContainer" containerID="811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.722283 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.727114 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-bzhst"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.728043 4982 scope.go:117] "RemoveContainer" containerID="811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665" Dec 05 19:19:25 crc kubenswrapper[4982]: E1205 19:19:25.728527 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665\": container with ID starting with 811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665 not found: ID does not exist" containerID="811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.728555 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665"} err="failed to get container status \"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665\": rpc error: code = NotFound desc = could not find container \"811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665\": container with ID starting with 811323e629877cecc3132e9039a41d8ada8184e756bcaa8470615c73ce7a6665 not found: ID does not exist" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729200 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " 
pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729237 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5r5n\" (UniqueName: \"kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729289 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psc8q\" (UniqueName: \"kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729342 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729370 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729407 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729444 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729470 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.729512 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 
crc kubenswrapper[4982]: I1205 19:19:25.731103 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.731116 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.732102 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.735858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.737274 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.740988 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-qsnxp"] Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.748244 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psc8q\" (UniqueName: \"kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q\") pod \"controller-manager-7df4fb797d-pg8z4\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.830958 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.831021 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.831100 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca\") pod 
\"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.831139 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5r5n\" (UniqueName: \"kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.831986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.832319 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.834721 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.849432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5r5n\" (UniqueName: \"kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n\") pod \"route-controller-manager-bf67cfb8c-79wtx\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.879753 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:25 crc kubenswrapper[4982]: I1205 19:19:25.986594 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.187560 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:26 crc kubenswrapper[4982]: W1205 19:19:26.191493 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod278d5614_b4ae_4307_bde4_418bc9bdd140.slice/crio-e4669ddd8a18716f22afba7d9b359576536f8c994c0c329bf07b1b93b26a9c02 WatchSource:0}: Error finding container e4669ddd8a18716f22afba7d9b359576536f8c994c0c329bf07b1b93b26a9c02: Status 404 returned error can't find the container with id e4669ddd8a18716f22afba7d9b359576536f8c994c0c329bf07b1b93b26a9c02 Dec 05 19:19:26 crc kubenswrapper[4982]: W1205 19:19:26.302591 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod857a9421_04de_48ba_a757_0a764da1b85b.slice/crio-13baeefcc0db8b3e9d3d03093cb8b841c533f365d167ba2b79d2804c67a7ad33 WatchSource:0}: Error finding container 13baeefcc0db8b3e9d3d03093cb8b841c533f365d167ba2b79d2804c67a7ad33: Status 404 returned error can't find the container with id 13baeefcc0db8b3e9d3d03093cb8b841c533f365d167ba2b79d2804c67a7ad33 Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.304094 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.699499 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" event={"ID":"278d5614-b4ae-4307-bde4-418bc9bdd140","Type":"ContainerStarted","Data":"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb"} Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.699766 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" event={"ID":"278d5614-b4ae-4307-bde4-418bc9bdd140","Type":"ContainerStarted","Data":"e4669ddd8a18716f22afba7d9b359576536f8c994c0c329bf07b1b93b26a9c02"} Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.699802 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.702193 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" event={"ID":"857a9421-04de-48ba-a757-0a764da1b85b","Type":"ContainerStarted","Data":"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471"} Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.702221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" event={"ID":"857a9421-04de-48ba-a757-0a764da1b85b","Type":"ContainerStarted","Data":"13baeefcc0db8b3e9d3d03093cb8b841c533f365d167ba2b79d2804c67a7ad33"} Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.702506 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.706616 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.717392 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" podStartSLOduration=1.717379211 podStartE2EDuration="1.717379211s" podCreationTimestamp="2025-12-05 19:19:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:26.714871648 +0000 UTC m=+345.596757643" watchObservedRunningTime="2025-12-05 19:19:26.717379211 +0000 UTC m=+345.599265196" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.732128 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" podStartSLOduration=1.732108913 podStartE2EDuration="1.732108913s" podCreationTimestamp="2025-12-05 19:19:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:26.728480142 +0000 UTC m=+345.610366137" watchObservedRunningTime="2025-12-05 19:19:26.732108913 +0000 UTC m=+345.613994908" Dec 05 19:19:26 crc kubenswrapper[4982]: I1205 19:19:26.833366 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:27 crc kubenswrapper[4982]: I1205 19:19:27.091391 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:27 crc kubenswrapper[4982]: I1205 19:19:27.107600 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:27 crc kubenswrapper[4982]: I1205 19:19:27.404372 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f105472-b420-4bb3-877d-663d96eed1af" path="/var/lib/kubelet/pods/4f105472-b420-4bb3-877d-663d96eed1af/volumes" Dec 05 19:19:27 crc kubenswrapper[4982]: I1205 19:19:27.405797 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d832b229-50b0-4f09-a892-eb36e39004fb" path="/var/lib/kubelet/pods/d832b229-50b0-4f09-a892-eb36e39004fb/volumes" Dec 05 19:19:28 crc kubenswrapper[4982]: I1205 19:19:28.716659 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" podUID="278d5614-b4ae-4307-bde4-418bc9bdd140" containerName="route-controller-manager" containerID="cri-o://ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb" gracePeriod=30 Dec 05 19:19:28 crc kubenswrapper[4982]: I1205 19:19:28.716869 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" podUID="857a9421-04de-48ba-a757-0a764da1b85b" containerName="controller-manager" containerID="cri-o://417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471" gracePeriod=30 Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.232112 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.238308 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.270438 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:19:29 crc kubenswrapper[4982]: E1205 19:19:29.270987 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="857a9421-04de-48ba-a757-0a764da1b85b" containerName="controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.271027 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="857a9421-04de-48ba-a757-0a764da1b85b" containerName="controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: E1205 19:19:29.271085 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="278d5614-b4ae-4307-bde4-418bc9bdd140" containerName="route-controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.271106 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="278d5614-b4ae-4307-bde4-418bc9bdd140" containerName="route-controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.271473 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="278d5614-b4ae-4307-bde4-418bc9bdd140" containerName="route-controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.271534 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="857a9421-04de-48ba-a757-0a764da1b85b" containerName="controller-manager" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.272145 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.276498 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391058 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles\") pod \"857a9421-04de-48ba-a757-0a764da1b85b\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391139 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config\") pod \"857a9421-04de-48ba-a757-0a764da1b85b\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391198 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca\") pod \"278d5614-b4ae-4307-bde4-418bc9bdd140\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391273 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config\") pod \"278d5614-b4ae-4307-bde4-418bc9bdd140\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391309 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5r5n\" (UniqueName: 
\"kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n\") pod \"278d5614-b4ae-4307-bde4-418bc9bdd140\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391339 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca\") pod \"857a9421-04de-48ba-a757-0a764da1b85b\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391414 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert\") pod \"857a9421-04de-48ba-a757-0a764da1b85b\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.391966 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca" (OuterVolumeSpecName: "client-ca") pod "857a9421-04de-48ba-a757-0a764da1b85b" (UID: "857a9421-04de-48ba-a757-0a764da1b85b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392053 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert\") pod \"278d5614-b4ae-4307-bde4-418bc9bdd140\" (UID: \"278d5614-b4ae-4307-bde4-418bc9bdd140\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392165 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca" (OuterVolumeSpecName: "client-ca") pod "278d5614-b4ae-4307-bde4-418bc9bdd140" (UID: "278d5614-b4ae-4307-bde4-418bc9bdd140"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392486 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config" (OuterVolumeSpecName: "config") pod "278d5614-b4ae-4307-bde4-418bc9bdd140" (UID: "278d5614-b4ae-4307-bde4-418bc9bdd140"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392621 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "857a9421-04de-48ba-a757-0a764da1b85b" (UID: "857a9421-04de-48ba-a757-0a764da1b85b"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392695 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psc8q\" (UniqueName: \"kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q\") pod \"857a9421-04de-48ba-a757-0a764da1b85b\" (UID: \"857a9421-04de-48ba-a757-0a764da1b85b\") " Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392889 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config" (OuterVolumeSpecName: "config") pod "857a9421-04de-48ba-a757-0a764da1b85b" (UID: "857a9421-04de-48ba-a757-0a764da1b85b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.392905 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thx9v\" (UniqueName: \"kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393112 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393298 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393314 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393327 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393343 4982 reconciler_common.go:293] "Volume detached 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/857a9421-04de-48ba-a757-0a764da1b85b-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.393356 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/278d5614-b4ae-4307-bde4-418bc9bdd140-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.397289 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n" (OuterVolumeSpecName: "kube-api-access-b5r5n") pod "278d5614-b4ae-4307-bde4-418bc9bdd140" (UID: "278d5614-b4ae-4307-bde4-418bc9bdd140"). InnerVolumeSpecName "kube-api-access-b5r5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.397661 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "857a9421-04de-48ba-a757-0a764da1b85b" (UID: "857a9421-04de-48ba-a757-0a764da1b85b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.398195 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q" (OuterVolumeSpecName: "kube-api-access-psc8q") pod "857a9421-04de-48ba-a757-0a764da1b85b" (UID: "857a9421-04de-48ba-a757-0a764da1b85b"). InnerVolumeSpecName "kube-api-access-psc8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.398263 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "278d5614-b4ae-4307-bde4-418bc9bdd140" (UID: "278d5614-b4ae-4307-bde4-418bc9bdd140"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494633 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thx9v\" (UniqueName: \"kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494844 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5r5n\" (UniqueName: \"kubernetes.io/projected/278d5614-b4ae-4307-bde4-418bc9bdd140-kube-api-access-b5r5n\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.494980 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/857a9421-04de-48ba-a757-0a764da1b85b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.495176 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/278d5614-b4ae-4307-bde4-418bc9bdd140-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.495258 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psc8q\" (UniqueName: \"kubernetes.io/projected/857a9421-04de-48ba-a757-0a764da1b85b-kube-api-access-psc8q\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.496193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.499606 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config\") pod 
\"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.501076 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.518810 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thx9v\" (UniqueName: \"kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v\") pod \"route-controller-manager-69d8b4dd4d-28wp8\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.593938 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.726699 4982 generic.go:334] "Generic (PLEG): container finished" podID="857a9421-04de-48ba-a757-0a764da1b85b" containerID="417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471" exitCode=0 Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.726797 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" event={"ID":"857a9421-04de-48ba-a757-0a764da1b85b","Type":"ContainerDied","Data":"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471"} Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.726837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" event={"ID":"857a9421-04de-48ba-a757-0a764da1b85b","Type":"ContainerDied","Data":"13baeefcc0db8b3e9d3d03093cb8b841c533f365d167ba2b79d2804c67a7ad33"} Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.726866 4982 scope.go:117] "RemoveContainer" containerID="417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.727005 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-pg8z4" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.735694 4982 generic.go:334] "Generic (PLEG): container finished" podID="278d5614-b4ae-4307-bde4-418bc9bdd140" containerID="ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb" exitCode=0 Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.735757 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" event={"ID":"278d5614-b4ae-4307-bde4-418bc9bdd140","Type":"ContainerDied","Data":"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb"} Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.735805 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" event={"ID":"278d5614-b4ae-4307-bde4-418bc9bdd140","Type":"ContainerDied","Data":"e4669ddd8a18716f22afba7d9b359576536f8c994c0c329bf07b1b93b26a9c02"} Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.735906 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.769641 4982 scope.go:117] "RemoveContainer" containerID="417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471" Dec 05 19:19:29 crc kubenswrapper[4982]: E1205 19:19:29.771016 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471\": container with ID starting with 417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471 not found: ID does not exist" containerID="417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.771073 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471"} err="failed to get container status \"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471\": rpc error: code = NotFound desc = could not find container \"417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471\": container with ID starting with 417787da9d006ab8f499610c54daee6911bb4b06dc795f92fbaa16819dd13471 not found: ID does not exist" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.771102 4982 scope.go:117] "RemoveContainer" containerID="ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.782547 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.791806 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-pg8z4"] Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.798289 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.800069 4982 scope.go:117] "RemoveContainer" containerID="ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb" Dec 05 19:19:29 crc kubenswrapper[4982]: E1205 19:19:29.803759 4982 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb\": container with ID starting with ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb not found: ID does not exist" containerID="ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.803958 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb"} err="failed to get container status \"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb\": rpc error: code = NotFound desc = could not find container \"ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb\": container with ID starting with ca4c176d3653a05b9141cbe172c9f7dfa96eb5657deddb7645f450a29398f7cb not found: ID does not exist" Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.804608 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-79wtx"] Dec 05 19:19:29 crc kubenswrapper[4982]: I1205 19:19:29.871353 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:19:30 crc kubenswrapper[4982]: I1205 19:19:30.745987 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" event={"ID":"d1fc29e9-8179-4d05-8f06-915b420120d6","Type":"ContainerStarted","Data":"a4c2f85bf1437ba052a99bd2640bb36f11dda8b1f80923f6729ab62c6749b203"} Dec 05 19:19:30 crc kubenswrapper[4982]: I1205 19:19:30.746433 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" event={"ID":"d1fc29e9-8179-4d05-8f06-915b420120d6","Type":"ContainerStarted","Data":"ba3d3919ebf4cb882a8ccfcfcff95be985936c17398bd75352f2e259aaae0b1d"} Dec 05 19:19:30 crc kubenswrapper[4982]: I1205 19:19:30.746460 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:30 crc kubenswrapper[4982]: I1205 19:19:30.754491 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:19:30 crc kubenswrapper[4982]: I1205 19:19:30.776087 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" podStartSLOduration=3.776059188 podStartE2EDuration="3.776059188s" podCreationTimestamp="2025-12-05 19:19:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:30.76544969 +0000 UTC m=+349.647335725" watchObservedRunningTime="2025-12-05 19:19:30.776059188 +0000 UTC m=+349.657945223" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.399069 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="278d5614-b4ae-4307-bde4-418bc9bdd140" path="/var/lib/kubelet/pods/278d5614-b4ae-4307-bde4-418bc9bdd140/volumes" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.400576 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="857a9421-04de-48ba-a757-0a764da1b85b" 
path="/var/lib/kubelet/pods/857a9421-04de-48ba-a757-0a764da1b85b/volumes" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.534206 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.535823 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.541130 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.542192 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.542491 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.542745 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.542971 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.548517 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.549047 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.557395 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.641572 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.641649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqdzn\" (UniqueName: \"kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.641717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.641748 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config\") pod 
\"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.641772 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.743704 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.743801 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.743962 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.744011 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqdzn\" (UniqueName: \"kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.744097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.745256 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.746244 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: 
I1205 19:19:31.748075 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.751644 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.776683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqdzn\" (UniqueName: \"kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn\") pod \"controller-manager-58498d946f-gnmdt\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:31 crc kubenswrapper[4982]: I1205 19:19:31.869186 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:32 crc kubenswrapper[4982]: I1205 19:19:32.089777 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:32 crc kubenswrapper[4982]: W1205 19:19:32.094586 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda29e889d_7fb4_4c41_818f_6442948365b2.slice/crio-3626e1847c381654401d122cc01adc31af5768602033ed50fcaeec30a6fd9c77 WatchSource:0}: Error finding container 3626e1847c381654401d122cc01adc31af5768602033ed50fcaeec30a6fd9c77: Status 404 returned error can't find the container with id 3626e1847c381654401d122cc01adc31af5768602033ed50fcaeec30a6fd9c77 Dec 05 19:19:32 crc kubenswrapper[4982]: I1205 19:19:32.771081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" event={"ID":"a29e889d-7fb4-4c41-818f-6442948365b2","Type":"ContainerStarted","Data":"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2"} Dec 05 19:19:32 crc kubenswrapper[4982]: I1205 19:19:32.771183 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" event={"ID":"a29e889d-7fb4-4c41-818f-6442948365b2","Type":"ContainerStarted","Data":"3626e1847c381654401d122cc01adc31af5768602033ed50fcaeec30a6fd9c77"} Dec 05 19:19:32 crc kubenswrapper[4982]: I1205 19:19:32.792869 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" podStartSLOduration=5.7928518 podStartE2EDuration="5.7928518s" podCreationTimestamp="2025-12-05 19:19:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:32.790264014 +0000 UTC m=+351.672150019" watchObservedRunningTime="2025-12-05 19:19:32.7928518 +0000 UTC m=+351.674737795" Dec 05 19:19:33 crc kubenswrapper[4982]: I1205 19:19:33.778130 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:33 crc kubenswrapper[4982]: I1205 19:19:33.784318 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.289308 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"] Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.289972 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-lhxlk" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="registry-server" containerID="cri-o://15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5" gracePeriod=2 Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.726335 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.768079 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content\") pod \"b9954db1-336a-4478-869c-080166403adb\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.768139 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities\") pod \"b9954db1-336a-4478-869c-080166403adb\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.768295 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2rpz\" (UniqueName: \"kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz\") pod \"b9954db1-336a-4478-869c-080166403adb\" (UID: \"b9954db1-336a-4478-869c-080166403adb\") " Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.769075 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities" (OuterVolumeSpecName: "utilities") pod "b9954db1-336a-4478-869c-080166403adb" (UID: "b9954db1-336a-4478-869c-080166403adb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.774048 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz" (OuterVolumeSpecName: "kube-api-access-z2rpz") pod "b9954db1-336a-4478-869c-080166403adb" (UID: "b9954db1-336a-4478-869c-080166403adb"). InnerVolumeSpecName "kube-api-access-z2rpz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.790052 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b9954db1-336a-4478-869c-080166403adb" (UID: "b9954db1-336a-4478-869c-080166403adb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.814363 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9954db1-336a-4478-869c-080166403adb" containerID="15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5" exitCode=0 Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.814399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerDied","Data":"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5"} Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.814427 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lhxlk" event={"ID":"b9954db1-336a-4478-869c-080166403adb","Type":"ContainerDied","Data":"26949e11024444cd11ff33bd1233f5c321228f66a15e5dd08efdc893ebd54a8b"} Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.814449 4982 scope.go:117] "RemoveContainer" containerID="15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.814483 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lhxlk" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.833934 4982 scope.go:117] "RemoveContainer" containerID="018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.852267 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"] Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.852603 4982 scope.go:117] "RemoveContainer" containerID="a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.856551 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-lhxlk"] Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.869465 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.869492 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b9954db1-336a-4478-869c-080166403adb-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.869503 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2rpz\" (UniqueName: \"kubernetes.io/projected/b9954db1-336a-4478-869c-080166403adb-kube-api-access-z2rpz\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.886902 4982 scope.go:117] "RemoveContainer" containerID="15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5" Dec 05 19:19:39 crc kubenswrapper[4982]: E1205 19:19:39.887339 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5\": container with ID starting with 15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5 not found: ID does not exist" containerID="15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.887396 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5"} err="failed to get container status \"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5\": rpc error: code = NotFound desc = could not find container \"15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5\": container with ID starting with 15f21bad6467e5fae6b0bb7672b55799e40f7890bc7d6d5790dc1b739f8c21a5 not found: ID does not exist" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.887421 4982 scope.go:117] "RemoveContainer" containerID="018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903" Dec 05 19:19:39 crc kubenswrapper[4982]: E1205 19:19:39.887731 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903\": container with ID starting with 018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903 not found: ID does not exist" containerID="018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.887796 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903"} err="failed to get container status \"018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903\": rpc error: code = NotFound desc = could not find container \"018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903\": container with ID starting with 018cd2ae89257d15cf32dfd4ce51657e9d1e0da10b5cd25a742b7562b7b11903 not found: ID does not exist" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.887838 4982 scope.go:117] "RemoveContainer" containerID="a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce" Dec 05 19:19:39 crc kubenswrapper[4982]: E1205 19:19:39.888185 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce\": container with ID starting with a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce not found: ID does not exist" containerID="a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce" Dec 05 19:19:39 crc kubenswrapper[4982]: I1205 19:19:39.888241 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce"} err="failed to get container status \"a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce\": rpc error: code = NotFound desc = could not find container \"a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce\": container with ID starting with a791bde7e48fe024405738a08285715e3857d84a31c2aa58806698a8a2b10dce not found: ID does not exist" Dec 05 19:19:41 crc kubenswrapper[4982]: I1205 19:19:41.400970 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9954db1-336a-4478-869c-080166403adb" path="/var/lib/kubelet/pods/b9954db1-336a-4478-869c-080166403adb/volumes" Dec 05 19:19:42 crc kubenswrapper[4982]: I1205 19:19:42.557535 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:19:42 crc kubenswrapper[4982]: I1205 19:19:42.557616 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:19:44 crc kubenswrapper[4982]: I1205 19:19:44.766256 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:44 crc kubenswrapper[4982]: I1205 19:19:44.766675 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" podUID="a29e889d-7fb4-4c41-818f-6442948365b2" containerName="controller-manager" containerID="cri-o://3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2" gracePeriod=30 Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.338742 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.441294 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles\") pod \"a29e889d-7fb4-4c41-818f-6442948365b2\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.441341 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert\") pod \"a29e889d-7fb4-4c41-818f-6442948365b2\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.441361 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config\") pod \"a29e889d-7fb4-4c41-818f-6442948365b2\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.441438 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca\") pod \"a29e889d-7fb4-4c41-818f-6442948365b2\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.441472 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqdzn\" (UniqueName: \"kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn\") pod \"a29e889d-7fb4-4c41-818f-6442948365b2\" (UID: \"a29e889d-7fb4-4c41-818f-6442948365b2\") " Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.442249 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "a29e889d-7fb4-4c41-818f-6442948365b2" (UID: "a29e889d-7fb4-4c41-818f-6442948365b2"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.446722 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca" (OuterVolumeSpecName: "client-ca") pod "a29e889d-7fb4-4c41-818f-6442948365b2" (UID: "a29e889d-7fb4-4c41-818f-6442948365b2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.449239 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a29e889d-7fb4-4c41-818f-6442948365b2" (UID: "a29e889d-7fb4-4c41-818f-6442948365b2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.449955 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config" (OuterVolumeSpecName: "config") pod "a29e889d-7fb4-4c41-818f-6442948365b2" (UID: "a29e889d-7fb4-4c41-818f-6442948365b2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.453843 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn" (OuterVolumeSpecName: "kube-api-access-tqdzn") pod "a29e889d-7fb4-4c41-818f-6442948365b2" (UID: "a29e889d-7fb4-4c41-818f-6442948365b2"). InnerVolumeSpecName "kube-api-access-tqdzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.543297 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a29e889d-7fb4-4c41-818f-6442948365b2-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.543333 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.543342 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.543353 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqdzn\" (UniqueName: \"kubernetes.io/projected/a29e889d-7fb4-4c41-818f-6442948365b2-kube-api-access-tqdzn\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.543364 4982 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a29e889d-7fb4-4c41-818f-6442948365b2-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.860938 4982 generic.go:334] "Generic (PLEG): container finished" podID="a29e889d-7fb4-4c41-818f-6442948365b2" containerID="3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2" exitCode=0 Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.860989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" event={"ID":"a29e889d-7fb4-4c41-818f-6442948365b2","Type":"ContainerDied","Data":"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2"} Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.861138 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" event={"ID":"a29e889d-7fb4-4c41-818f-6442948365b2","Type":"ContainerDied","Data":"3626e1847c381654401d122cc01adc31af5768602033ed50fcaeec30a6fd9c77"} Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.861781 4982 scope.go:117] "RemoveContainer" containerID="3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.861861 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58498d946f-gnmdt" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.887874 4982 scope.go:117] "RemoveContainer" containerID="3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2" Dec 05 19:19:45 crc kubenswrapper[4982]: E1205 19:19:45.888516 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2\": container with ID starting with 3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2 not found: ID does not exist" containerID="3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.888556 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2"} err="failed to get container status \"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2\": rpc error: code = NotFound desc = could not find container \"3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2\": container with ID starting with 3428f72e6c802fca3c4f1212cb31321104d1584a86492e2226cc215a77a25db2 not found: ID does not exist" Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.892088 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:45 crc kubenswrapper[4982]: I1205 19:19:45.899538 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-58498d946f-gnmdt"] Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536468 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-gr8wn"] Dec 05 19:19:46 crc kubenswrapper[4982]: E1205 19:19:46.536666 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="extract-content" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536678 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="extract-content" Dec 05 19:19:46 crc kubenswrapper[4982]: E1205 19:19:46.536686 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="registry-server" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536693 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="registry-server" Dec 
05 19:19:46 crc kubenswrapper[4982]: E1205 19:19:46.536708 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="extract-utilities" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536715 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="extract-utilities" Dec 05 19:19:46 crc kubenswrapper[4982]: E1205 19:19:46.536723 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a29e889d-7fb4-4c41-818f-6442948365b2" containerName="controller-manager" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536729 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a29e889d-7fb4-4c41-818f-6442948365b2" containerName="controller-manager" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536816 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9954db1-336a-4478-869c-080166403adb" containerName="registry-server" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.536832 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a29e889d-7fb4-4c41-818f-6442948365b2" containerName="controller-manager" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.537227 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.538723 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.539291 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.539376 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.539440 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.539731 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.540891 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.548071 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.555031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a558508a-0116-4460-ac9c-581ba5ee9605-serving-cert\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.555084 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " 
pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.555128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv5zc\" (UniqueName: \"kubernetes.io/projected/a558508a-0116-4460-ac9c-581ba5ee9605-kube-api-access-nv5zc\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.555179 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-config\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.555220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-client-ca\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.556184 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-gr8wn"] Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.655756 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.655874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv5zc\" (UniqueName: \"kubernetes.io/projected/a558508a-0116-4460-ac9c-581ba5ee9605-kube-api-access-nv5zc\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.655943 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-config\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.655989 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-client-ca\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.656118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a558508a-0116-4460-ac9c-581ba5ee9605-serving-cert\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: 
\"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.657211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-client-ca\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.657861 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-proxy-ca-bundles\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.658552 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a558508a-0116-4460-ac9c-581ba5ee9605-config\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.667032 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a558508a-0116-4460-ac9c-581ba5ee9605-serving-cert\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.684229 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv5zc\" (UniqueName: \"kubernetes.io/projected/a558508a-0116-4460-ac9c-581ba5ee9605-kube-api-access-nv5zc\") pod \"controller-manager-7df4fb797d-gr8wn\" (UID: \"a558508a-0116-4460-ac9c-581ba5ee9605\") " pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:46 crc kubenswrapper[4982]: I1205 19:19:46.855208 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.250330 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7df4fb797d-gr8wn"] Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.399110 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a29e889d-7fb4-4c41-818f-6442948365b2" path="/var/lib/kubelet/pods/a29e889d-7fb4-4c41-818f-6442948365b2/volumes" Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.873416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" event={"ID":"a558508a-0116-4460-ac9c-581ba5ee9605","Type":"ContainerStarted","Data":"cdbb8eb8951d63e7623750af0e63db820f28a70f8eef9b9772e1c4ef4bf26ab9"} Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.873460 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" event={"ID":"a558508a-0116-4460-ac9c-581ba5ee9605","Type":"ContainerStarted","Data":"c21813e30c707e630019962659ce6bf4d3eda08b9b3812a1e270a5502c302a3c"} Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.873787 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.877548 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" Dec 05 19:19:47 crc kubenswrapper[4982]: I1205 19:19:47.895574 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7df4fb797d-gr8wn" podStartSLOduration=3.8955591800000002 podStartE2EDuration="3.89555918s" podCreationTimestamp="2025-12-05 19:19:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:47.894874883 +0000 UTC m=+366.776760878" watchObservedRunningTime="2025-12-05 19:19:47.89555918 +0000 UTC m=+366.777445175" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.471547 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v82hh"] Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.472724 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.488802 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v82hh"] Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.630880 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-trusted-ca\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.630954 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.640137 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-bound-sa-token\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.640265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62541575-4aec-4017-8925-63ffaa4c9fb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.641527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62541575-4aec-4017-8925-63ffaa4c9fb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.641685 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsf4q\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-kube-api-access-fsf4q\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.641760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-tls\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.641803 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-certificates\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.733399 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.742883 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsf4q\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-kube-api-access-fsf4q\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.742949 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-tls\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.742975 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-certificates\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.743020 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-trusted-ca\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.743061 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-bound-sa-token\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.743086 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62541575-4aec-4017-8925-63ffaa4c9fb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.743111 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62541575-4aec-4017-8925-63ffaa4c9fb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.745285 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-trusted-ca\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.746386 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-certificates\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.746815 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/62541575-4aec-4017-8925-63ffaa4c9fb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.749365 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-registry-tls\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.755228 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/62541575-4aec-4017-8925-63ffaa4c9fb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.758455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsf4q\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-kube-api-access-fsf4q\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.762237 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/62541575-4aec-4017-8925-63ffaa4c9fb5-bound-sa-token\") pod \"image-registry-66df7c8f76-v82hh\" (UID: \"62541575-4aec-4017-8925-63ffaa4c9fb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:49 crc kubenswrapper[4982]: I1205 19:19:49.838856 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:50 crc kubenswrapper[4982]: I1205 19:19:50.337589 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-v82hh"] Dec 05 19:19:50 crc kubenswrapper[4982]: W1205 19:19:50.342736 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod62541575_4aec_4017_8925_63ffaa4c9fb5.slice/crio-965934c25e8c80daf8ef3bd10dac0a8f11559ab847c3682d63fd3d5875d572e9 WatchSource:0}: Error finding container 965934c25e8c80daf8ef3bd10dac0a8f11559ab847c3682d63fd3d5875d572e9: Status 404 returned error can't find the container with id 965934c25e8c80daf8ef3bd10dac0a8f11559ab847c3682d63fd3d5875d572e9 Dec 05 19:19:50 crc kubenswrapper[4982]: I1205 19:19:50.888527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" event={"ID":"62541575-4aec-4017-8925-63ffaa4c9fb5","Type":"ContainerStarted","Data":"6fa03e0810a4c7f8eca947b93625225dac6d3d064866ae60712784ff208d0e91"} Dec 05 19:19:50 crc kubenswrapper[4982]: I1205 19:19:50.888911 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" event={"ID":"62541575-4aec-4017-8925-63ffaa4c9fb5","Type":"ContainerStarted","Data":"965934c25e8c80daf8ef3bd10dac0a8f11559ab847c3682d63fd3d5875d572e9"} Dec 05 19:19:50 crc kubenswrapper[4982]: I1205 19:19:50.888934 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:19:50 crc kubenswrapper[4982]: I1205 19:19:50.910088 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" podStartSLOduration=1.910067512 podStartE2EDuration="1.910067512s" podCreationTimestamp="2025-12-05 19:19:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:19:50.908886922 +0000 UTC m=+369.790772927" watchObservedRunningTime="2025-12-05 19:19:50.910067512 +0000 UTC m=+369.791953517" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.345518 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dzwxm"] Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.346640 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dzwxm" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="registry-server" containerID="cri-o://3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24" gracePeriod=2 Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.912517 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.937601 4982 generic.go:334] "Generic (PLEG): container finished" podID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerID="3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24" exitCode=0 Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.937651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerDied","Data":"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24"} Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.937678 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dzwxm" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.937739 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dzwxm" event={"ID":"aab55c2a-f07b-4f88-b89b-fe417ff42c27","Type":"ContainerDied","Data":"ba7d6d063e4f8b98837a681e8385f4b6d62ce52ed7a05c47acef753bd440f7fc"} Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.937780 4982 scope.go:117] "RemoveContainer" containerID="3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.952606 4982 scope.go:117] "RemoveContainer" containerID="806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.971469 4982 scope.go:117] "RemoveContainer" containerID="6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.985791 4982 scope.go:117] "RemoveContainer" containerID="3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24" Dec 05 19:19:58 crc kubenswrapper[4982]: E1205 19:19:58.986248 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24\": container with ID starting with 3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24 not found: ID does not exist" containerID="3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.986302 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24"} err="failed to get container status \"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24\": rpc error: code = NotFound desc = could not find container \"3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24\": container with ID starting with 3e0d0997db48aed5799e2da492e2b52d7f9214779addd7258ecf0d2842464a24 not found: ID does not exist" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.986339 4982 scope.go:117] "RemoveContainer" containerID="806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a" Dec 05 19:19:58 crc kubenswrapper[4982]: E1205 19:19:58.986674 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a\": container with ID starting with 806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a not found: ID does not exist" 
containerID="806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.986698 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a"} err="failed to get container status \"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a\": rpc error: code = NotFound desc = could not find container \"806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a\": container with ID starting with 806ab79ebe57359ff3016635f4539375465a4adff70384e1cc5ff41c22ffa58a not found: ID does not exist" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.986714 4982 scope.go:117] "RemoveContainer" containerID="6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851" Dec 05 19:19:58 crc kubenswrapper[4982]: E1205 19:19:58.986964 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851\": container with ID starting with 6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851 not found: ID does not exist" containerID="6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851" Dec 05 19:19:58 crc kubenswrapper[4982]: I1205 19:19:58.986986 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851"} err="failed to get container status \"6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851\": rpc error: code = NotFound desc = could not find container \"6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851\": container with ID starting with 6023170c3fa0f86bb814f82e87c505bdf73774ee8472dd4557c6a4d06052c851 not found: ID does not exist" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.097956 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities\") pod \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.098110 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2s6bf\" (UniqueName: \"kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf\") pod \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.098236 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content\") pod \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\" (UID: \"aab55c2a-f07b-4f88-b89b-fe417ff42c27\") " Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.101350 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities" (OuterVolumeSpecName: "utilities") pod "aab55c2a-f07b-4f88-b89b-fe417ff42c27" (UID: "aab55c2a-f07b-4f88-b89b-fe417ff42c27"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.104302 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf" (OuterVolumeSpecName: "kube-api-access-2s6bf") pod "aab55c2a-f07b-4f88-b89b-fe417ff42c27" (UID: "aab55c2a-f07b-4f88-b89b-fe417ff42c27"). InnerVolumeSpecName "kube-api-access-2s6bf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.160677 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aab55c2a-f07b-4f88-b89b-fe417ff42c27" (UID: "aab55c2a-f07b-4f88-b89b-fe417ff42c27"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.201236 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.201306 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2s6bf\" (UniqueName: \"kubernetes.io/projected/aab55c2a-f07b-4f88-b89b-fe417ff42c27-kube-api-access-2s6bf\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.201329 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aab55c2a-f07b-4f88-b89b-fe417ff42c27-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.299939 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dzwxm"] Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.307561 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dzwxm"] Dec 05 19:19:59 crc kubenswrapper[4982]: I1205 19:19:59.398493 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" path="/var/lib/kubelet/pods/aab55c2a-f07b-4f88-b89b-fe417ff42c27/volumes" Dec 05 19:20:04 crc kubenswrapper[4982]: I1205 19:20:04.752469 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:20:04 crc kubenswrapper[4982]: I1205 19:20:04.753254 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" podUID="d1fc29e9-8179-4d05-8f06-915b420120d6" containerName="route-controller-manager" containerID="cri-o://a4c2f85bf1437ba052a99bd2640bb36f11dda8b1f80923f6729ab62c6749b203" gracePeriod=30 Dec 05 19:20:04 crc kubenswrapper[4982]: I1205 19:20:04.978083 4982 generic.go:334] "Generic (PLEG): container finished" podID="d1fc29e9-8179-4d05-8f06-915b420120d6" containerID="a4c2f85bf1437ba052a99bd2640bb36f11dda8b1f80923f6729ab62c6749b203" exitCode=0 Dec 05 19:20:04 crc kubenswrapper[4982]: I1205 19:20:04.978166 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" 
event={"ID":"d1fc29e9-8179-4d05-8f06-915b420120d6","Type":"ContainerDied","Data":"a4c2f85bf1437ba052a99bd2640bb36f11dda8b1f80923f6729ab62c6749b203"} Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.258571 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.391224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config\") pod \"d1fc29e9-8179-4d05-8f06-915b420120d6\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.391314 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thx9v\" (UniqueName: \"kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v\") pod \"d1fc29e9-8179-4d05-8f06-915b420120d6\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.391383 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert\") pod \"d1fc29e9-8179-4d05-8f06-915b420120d6\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.391421 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca\") pod \"d1fc29e9-8179-4d05-8f06-915b420120d6\" (UID: \"d1fc29e9-8179-4d05-8f06-915b420120d6\") " Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.392390 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca" (OuterVolumeSpecName: "client-ca") pod "d1fc29e9-8179-4d05-8f06-915b420120d6" (UID: "d1fc29e9-8179-4d05-8f06-915b420120d6"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.392474 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config" (OuterVolumeSpecName: "config") pod "d1fc29e9-8179-4d05-8f06-915b420120d6" (UID: "d1fc29e9-8179-4d05-8f06-915b420120d6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.398360 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v" (OuterVolumeSpecName: "kube-api-access-thx9v") pod "d1fc29e9-8179-4d05-8f06-915b420120d6" (UID: "d1fc29e9-8179-4d05-8f06-915b420120d6"). InnerVolumeSpecName "kube-api-access-thx9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.398380 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d1fc29e9-8179-4d05-8f06-915b420120d6" (UID: "d1fc29e9-8179-4d05-8f06-915b420120d6"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.493137 4982 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.493792 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1fc29e9-8179-4d05-8f06-915b420120d6-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.494027 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thx9v\" (UniqueName: \"kubernetes.io/projected/d1fc29e9-8179-4d05-8f06-915b420120d6-kube-api-access-thx9v\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.494102 4982 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d1fc29e9-8179-4d05-8f06-915b420120d6-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.985505 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" event={"ID":"d1fc29e9-8179-4d05-8f06-915b420120d6","Type":"ContainerDied","Data":"ba3d3919ebf4cb882a8ccfcfcff95be985936c17398bd75352f2e259aaae0b1d"} Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.985595 4982 scope.go:117] "RemoveContainer" containerID="a4c2f85bf1437ba052a99bd2640bb36f11dda8b1f80923f6729ab62c6749b203" Dec 05 19:20:05 crc kubenswrapper[4982]: I1205 19:20:05.985659 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.009926 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.016436 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69d8b4dd4d-28wp8"] Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557225 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9"] Dec 05 19:20:06 crc kubenswrapper[4982]: E1205 19:20:06.557483 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1fc29e9-8179-4d05-8f06-915b420120d6" containerName="route-controller-manager" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557498 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1fc29e9-8179-4d05-8f06-915b420120d6" containerName="route-controller-manager" Dec 05 19:20:06 crc kubenswrapper[4982]: E1205 19:20:06.557513 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="extract-content" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557522 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="extract-content" Dec 05 19:20:06 crc kubenswrapper[4982]: E1205 19:20:06.557535 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="extract-utilities" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557543 4982 
state_mem.go:107] "Deleted CPUSet assignment" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="extract-utilities" Dec 05 19:20:06 crc kubenswrapper[4982]: E1205 19:20:06.557559 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="registry-server" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557568 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="registry-server" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557685 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1fc29e9-8179-4d05-8f06-915b420120d6" containerName="route-controller-manager" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.557701 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="aab55c2a-f07b-4f88-b89b-fe417ff42c27" containerName="registry-server" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.558244 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.560445 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.560746 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.560747 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.560825 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.561208 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.561885 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.577196 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9"] Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.616740 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-config\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.616878 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmsdl\" (UniqueName: \"kubernetes.io/projected/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-kube-api-access-pmsdl\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.616940 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-client-ca\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.617016 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-serving-cert\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.718070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmsdl\" (UniqueName: \"kubernetes.io/projected/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-kube-api-access-pmsdl\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.718142 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-config\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.718219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-client-ca\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.718263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-serving-cert\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.719120 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-client-ca\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.719680 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-config\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.725359 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-serving-cert\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.738369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmsdl\" (UniqueName: \"kubernetes.io/projected/8825af10-66e6-4cb5-843d-5cf4f3bc56fa-kube-api-access-pmsdl\") pod \"route-controller-manager-bf67cfb8c-bwdq9\" (UID: \"8825af10-66e6-4cb5-843d-5cf4f3bc56fa\") " pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:06 crc kubenswrapper[4982]: I1205 19:20:06.884568 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:07 crc kubenswrapper[4982]: I1205 19:20:07.325236 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9"] Dec 05 19:20:07 crc kubenswrapper[4982]: W1205 19:20:07.335054 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8825af10_66e6_4cb5_843d_5cf4f3bc56fa.slice/crio-a7ef56d59c7156ea5824b5f0a79925e5df4f4c7ad762d2442a30d494bf3621c0 WatchSource:0}: Error finding container a7ef56d59c7156ea5824b5f0a79925e5df4f4c7ad762d2442a30d494bf3621c0: Status 404 returned error can't find the container with id a7ef56d59c7156ea5824b5f0a79925e5df4f4c7ad762d2442a30d494bf3621c0 Dec 05 19:20:07 crc kubenswrapper[4982]: I1205 19:20:07.398807 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1fc29e9-8179-4d05-8f06-915b420120d6" path="/var/lib/kubelet/pods/d1fc29e9-8179-4d05-8f06-915b420120d6/volumes" Dec 05 19:20:08 crc kubenswrapper[4982]: I1205 19:20:07.999523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" event={"ID":"8825af10-66e6-4cb5-843d-5cf4f3bc56fa","Type":"ContainerStarted","Data":"5220fd73675647e3c379c3851d6ed182d03e3f2186826c4c05e54c29539fbd0a"} Dec 05 19:20:08 crc kubenswrapper[4982]: I1205 19:20:07.999567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" event={"ID":"8825af10-66e6-4cb5-843d-5cf4f3bc56fa","Type":"ContainerStarted","Data":"a7ef56d59c7156ea5824b5f0a79925e5df4f4c7ad762d2442a30d494bf3621c0"} Dec 05 19:20:08 crc kubenswrapper[4982]: I1205 19:20:07.999676 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:08 crc kubenswrapper[4982]: I1205 19:20:08.004486 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" Dec 05 19:20:08 crc kubenswrapper[4982]: I1205 19:20:08.030277 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-bf67cfb8c-bwdq9" podStartSLOduration=4.030258892 podStartE2EDuration="4.030258892s" podCreationTimestamp="2025-12-05 19:20:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:20:08.027889102 +0000 UTC 
m=+386.909775137" watchObservedRunningTime="2025-12-05 19:20:08.030258892 +0000 UTC m=+386.912144887" Dec 05 19:20:09 crc kubenswrapper[4982]: I1205 19:20:09.850982 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-v82hh" Dec 05 19:20:09 crc kubenswrapper[4982]: I1205 19:20:09.927008 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"] Dec 05 19:20:12 crc kubenswrapper[4982]: I1205 19:20:12.557769 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:20:12 crc kubenswrapper[4982]: I1205 19:20:12.558266 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.824133 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.825180 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-6k2jq" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="registry-server" containerID="cri-o://00033f8d46e76911d11888b7f94d3ab7911179aa07c464e83b43ea291302eef4" gracePeriod=30 Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.830322 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bhlmf"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.830770 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bhlmf" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="registry-server" containerID="cri-o://9d37e0b5186e5e7f9ab770337506c93b4ffc0d7d29da9e8b71317480d69c7b8c" gracePeriod=30 Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.837604 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.837976 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" containerID="cri-o://b4fafc70df566d0ff95497164a6c826463887d66fc3e290a7d8db616a5540cda" gracePeriod=30 Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.841956 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.842249 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-9cd5r" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="registry-server" containerID="cri-o://580fd60b8f07f6fe91fcfe89fdb182cf3145691c2a274a8dec231f0c90a25bbf" gracePeriod=30 Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.858807 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.859307 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vpkcb" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="registry-server" containerID="cri-o://895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" gracePeriod=30 Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.868341 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-n4462"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.869180 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.879477 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-n4462"] Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.970592 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj8cr\" (UniqueName: \"kubernetes.io/projected/66d787d5-2cd9-4a22-8549-acd33135e4f9-kube-api-access-jj8cr\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.970717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:22 crc kubenswrapper[4982]: I1205 19:20:22.970753 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: E1205 19:20:23.050791 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4 is running failed: container process not found" containerID="895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 19:20:23 crc kubenswrapper[4982]: E1205 19:20:23.051288 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4 is running failed: container process not found" containerID="895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 19:20:23 crc kubenswrapper[4982]: E1205 19:20:23.051910 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4 is running 
failed: container process not found" containerID="895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 19:20:23 crc kubenswrapper[4982]: E1205 19:20:23.051942 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-vpkcb" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="registry-server" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.071557 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.071613 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.071654 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj8cr\" (UniqueName: \"kubernetes.io/projected/66d787d5-2cd9-4a22-8549-acd33135e4f9-kube-api-access-jj8cr\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.073638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.079665 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/66d787d5-2cd9-4a22-8549-acd33135e4f9-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.087280 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj8cr\" (UniqueName: \"kubernetes.io/projected/66d787d5-2cd9-4a22-8549-acd33135e4f9-kube-api-access-jj8cr\") pod \"marketplace-operator-79b997595-n4462\" (UID: \"66d787d5-2cd9-4a22-8549-acd33135e4f9\") " pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.096504 4982 generic.go:334] "Generic (PLEG): container finished" podID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerID="9d37e0b5186e5e7f9ab770337506c93b4ffc0d7d29da9e8b71317480d69c7b8c" exitCode=0 Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.096587 4982 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerDied","Data":"9d37e0b5186e5e7f9ab770337506c93b4ffc0d7d29da9e8b71317480d69c7b8c"} Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.099452 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2800739-c6ca-495c-a39b-b619242e6867" containerID="580fd60b8f07f6fe91fcfe89fdb182cf3145691c2a274a8dec231f0c90a25bbf" exitCode=0 Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.099527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerDied","Data":"580fd60b8f07f6fe91fcfe89fdb182cf3145691c2a274a8dec231f0c90a25bbf"} Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.101581 4982 generic.go:334] "Generic (PLEG): container finished" podID="10ea7812-4e92-4291-9929-636eccbae790" containerID="895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" exitCode=0 Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.101624 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerDied","Data":"895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4"} Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.103858 4982 generic.go:334] "Generic (PLEG): container finished" podID="4718733b-932d-413f-9b5c-3c8a773df710" containerID="00033f8d46e76911d11888b7f94d3ab7911179aa07c464e83b43ea291302eef4" exitCode=0 Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.103914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerDied","Data":"00033f8d46e76911d11888b7f94d3ab7911179aa07c464e83b43ea291302eef4"} Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.105782 4982 generic.go:334] "Generic (PLEG): container finished" podID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerID="b4fafc70df566d0ff95497164a6c826463887d66fc3e290a7d8db616a5540cda" exitCode=0 Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.105806 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerDied","Data":"b4fafc70df566d0ff95497164a6c826463887d66fc3e290a7d8db616a5540cda"} Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.105830 4982 scope.go:117] "RemoveContainer" containerID="f724160aff553a57e8ffc46d0828689a3ed89dca5d69621ba675eb0c61e33a7d" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.195961 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.387842 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.407865 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.453111 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.459208 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.460196 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.481768 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ctnw\" (UniqueName: \"kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw\") pod \"10ea7812-4e92-4291-9929-636eccbae790\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.481832 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content\") pod \"10ea7812-4e92-4291-9929-636eccbae790\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.481866 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wcn4x\" (UniqueName: \"kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x\") pod \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.481984 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities\") pod \"10ea7812-4e92-4291-9929-636eccbae790\" (UID: \"10ea7812-4e92-4291-9929-636eccbae790\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.482983 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content\") pod \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.483038 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities\") pod \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\" (UID: \"335c7642-44cd-41bf-99ac-9c9fcbbe74be\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.487033 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw" (OuterVolumeSpecName: "kube-api-access-2ctnw") pod "10ea7812-4e92-4291-9929-636eccbae790" (UID: "10ea7812-4e92-4291-9929-636eccbae790"). InnerVolumeSpecName "kube-api-access-2ctnw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.488574 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities" (OuterVolumeSpecName: "utilities") pod "335c7642-44cd-41bf-99ac-9c9fcbbe74be" (UID: "335c7642-44cd-41bf-99ac-9c9fcbbe74be"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.495367 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities" (OuterVolumeSpecName: "utilities") pod "10ea7812-4e92-4291-9929-636eccbae790" (UID: "10ea7812-4e92-4291-9929-636eccbae790"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.496249 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x" (OuterVolumeSpecName: "kube-api-access-wcn4x") pod "335c7642-44cd-41bf-99ac-9c9fcbbe74be" (UID: "335c7642-44cd-41bf-99ac-9c9fcbbe74be"). InnerVolumeSpecName "kube-api-access-wcn4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.560564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "335c7642-44cd-41bf-99ac-9c9fcbbe74be" (UID: "335c7642-44cd-41bf-99ac-9c9fcbbe74be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.584824 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5527\" (UniqueName: \"kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527\") pod \"c2800739-c6ca-495c-a39b-b619242e6867\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.584896 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics\") pod \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.584929 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca\") pod \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\" (UID: \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585010 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities\") pod \"c2800739-c6ca-495c-a39b-b619242e6867\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585047 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content\") pod \"4718733b-932d-413f-9b5c-3c8a773df710\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585081 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9z8gr\" (UniqueName: \"kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr\") pod \"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\" (UID: 
\"f0aef6cf-30ed-4fc5-b84d-585e4692afe9\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585127 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k8px\" (UniqueName: \"kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px\") pod \"4718733b-932d-413f-9b5c-3c8a773df710\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585299 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities\") pod \"4718733b-932d-413f-9b5c-3c8a773df710\" (UID: \"4718733b-932d-413f-9b5c-3c8a773df710\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585330 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content\") pod \"c2800739-c6ca-495c-a39b-b619242e6867\" (UID: \"c2800739-c6ca-495c-a39b-b619242e6867\") " Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585569 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585592 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585605 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/335c7642-44cd-41bf-99ac-9c9fcbbe74be-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585616 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ctnw\" (UniqueName: \"kubernetes.io/projected/10ea7812-4e92-4291-9929-636eccbae790-kube-api-access-2ctnw\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.585629 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wcn4x\" (UniqueName: \"kubernetes.io/projected/335c7642-44cd-41bf-99ac-9c9fcbbe74be-kube-api-access-wcn4x\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.586519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "f0aef6cf-30ed-4fc5-b84d-585e4692afe9" (UID: "f0aef6cf-30ed-4fc5-b84d-585e4692afe9"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.586713 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities" (OuterVolumeSpecName: "utilities") pod "4718733b-932d-413f-9b5c-3c8a773df710" (UID: "4718733b-932d-413f-9b5c-3c8a773df710"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.586708 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities" (OuterVolumeSpecName: "utilities") pod "c2800739-c6ca-495c-a39b-b619242e6867" (UID: "c2800739-c6ca-495c-a39b-b619242e6867"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.587829 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "f0aef6cf-30ed-4fc5-b84d-585e4692afe9" (UID: "f0aef6cf-30ed-4fc5-b84d-585e4692afe9"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.588705 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px" (OuterVolumeSpecName: "kube-api-access-8k8px") pod "4718733b-932d-413f-9b5c-3c8a773df710" (UID: "4718733b-932d-413f-9b5c-3c8a773df710"). InnerVolumeSpecName "kube-api-access-8k8px". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.589406 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527" (OuterVolumeSpecName: "kube-api-access-c5527") pod "c2800739-c6ca-495c-a39b-b619242e6867" (UID: "c2800739-c6ca-495c-a39b-b619242e6867"). InnerVolumeSpecName "kube-api-access-c5527". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.591068 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr" (OuterVolumeSpecName: "kube-api-access-9z8gr") pod "f0aef6cf-30ed-4fc5-b84d-585e4692afe9" (UID: "f0aef6cf-30ed-4fc5-b84d-585e4692afe9"). InnerVolumeSpecName "kube-api-access-9z8gr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.613013 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c2800739-c6ca-495c-a39b-b619242e6867" (UID: "c2800739-c6ca-495c-a39b-b619242e6867"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.621012 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10ea7812-4e92-4291-9929-636eccbae790" (UID: "10ea7812-4e92-4291-9929-636eccbae790"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.654126 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4718733b-932d-413f-9b5c-3c8a773df710" (UID: "4718733b-932d-413f-9b5c-3c8a773df710"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687235 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k8px\" (UniqueName: \"kubernetes.io/projected/4718733b-932d-413f-9b5c-3c8a773df710-kube-api-access-8k8px\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687292 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687309 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687320 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5527\" (UniqueName: \"kubernetes.io/projected/c2800739-c6ca-495c-a39b-b619242e6867-kube-api-access-c5527\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687332 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687345 4982 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687359 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10ea7812-4e92-4291-9929-636eccbae790-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687371 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c2800739-c6ca-495c-a39b-b619242e6867-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687382 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4718733b-932d-413f-9b5c-3c8a773df710-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.687395 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9z8gr\" (UniqueName: \"kubernetes.io/projected/f0aef6cf-30ed-4fc5-b84d-585e4692afe9-kube-api-access-9z8gr\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:23 crc kubenswrapper[4982]: I1205 19:20:23.751814 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-n4462"] Dec 05 19:20:23 crc kubenswrapper[4982]: W1205 19:20:23.755386 4982 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66d787d5_2cd9_4a22_8549_acd33135e4f9.slice/crio-44d2a2f8f8bffd1318d847d0b21b17a60168cc0c318d2b299a67a9ad857057ee WatchSource:0}: Error finding container 44d2a2f8f8bffd1318d847d0b21b17a60168cc0c318d2b299a67a9ad857057ee: Status 404 returned error can't find the container with id 44d2a2f8f8bffd1318d847d0b21b17a60168cc0c318d2b299a67a9ad857057ee Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.113060 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" event={"ID":"f0aef6cf-30ed-4fc5-b84d-585e4692afe9","Type":"ContainerDied","Data":"1672a1e212f35d494c2ecde5da920a7ac1b502772de4137897728e787e4cbb46"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.113111 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-wdf6s" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.113420 4982 scope.go:117] "RemoveContainer" containerID="b4fafc70df566d0ff95497164a6c826463887d66fc3e290a7d8db616a5540cda" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.115998 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhlmf" event={"ID":"335c7642-44cd-41bf-99ac-9c9fcbbe74be","Type":"ContainerDied","Data":"3ffa17404703287cb362623215e00f6c9b576f3ddb9f012abfb89052411911a4"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.116084 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhlmf" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.118823 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-9cd5r" event={"ID":"c2800739-c6ca-495c-a39b-b619242e6867","Type":"ContainerDied","Data":"9728b8582b459467de6c7e44f1b0c721afecdeb9fdbbe9bb3bcd25c67003a24f"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.118922 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-9cd5r" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.121445 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vpkcb" event={"ID":"10ea7812-4e92-4291-9929-636eccbae790","Type":"ContainerDied","Data":"0659bdd40a6dd08c938c33212b20208055c83cd8757457c4f78a12d930a390e6"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.121473 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vpkcb" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.123371 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6k2jq" event={"ID":"4718733b-932d-413f-9b5c-3c8a773df710","Type":"ContainerDied","Data":"2ad79d55999a84f44a604eba892e299069493d430fc78ad42ce19da529b25ce2"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.123462 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6k2jq" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.125515 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" event={"ID":"66d787d5-2cd9-4a22-8549-acd33135e4f9","Type":"ContainerStarted","Data":"687fce53740fe841d730665d85bfac06269de9df4483a89810278fe6e0422715"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.125545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" event={"ID":"66d787d5-2cd9-4a22-8549-acd33135e4f9","Type":"ContainerStarted","Data":"44d2a2f8f8bffd1318d847d0b21b17a60168cc0c318d2b299a67a9ad857057ee"} Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.126074 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.127230 4982 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-n4462 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.65:8080/healthz\": dial tcp 10.217.0.65:8080: connect: connection refused" start-of-body= Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.127850 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" podUID="66d787d5-2cd9-4a22-8549-acd33135e4f9" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.65:8080/healthz\": dial tcp 10.217.0.65:8080: connect: connection refused" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.135483 4982 scope.go:117] "RemoveContainer" containerID="9d37e0b5186e5e7f9ab770337506c93b4ffc0d7d29da9e8b71317480d69c7b8c" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.154886 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" podStartSLOduration=2.154864092 podStartE2EDuration="2.154864092s" podCreationTimestamp="2025-12-05 19:20:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:20:24.153927078 +0000 UTC m=+403.035813093" watchObservedRunningTime="2025-12-05 19:20:24.154864092 +0000 UTC m=+403.036750107" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.168708 4982 scope.go:117] "RemoveContainer" containerID="f9b4d5a94053b6a48c9297dadd89c062f33f9e9797a5180fe156192e6f9ff40c" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.195721 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.202747 4982 scope.go:117] "RemoveContainer" containerID="2dd76e8298426f7edd33acb6616de061bacc6064cc2131e9f9964351ced3df45" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.204000 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-wdf6s"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.215515 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bhlmf"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.221930 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bhlmf"] Dec 05 19:20:24 
crc kubenswrapper[4982]: I1205 19:20:24.228747 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.235058 4982 scope.go:117] "RemoveContainer" containerID="580fd60b8f07f6fe91fcfe89fdb182cf3145691c2a274a8dec231f0c90a25bbf" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.235551 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-9cd5r"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.244301 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.251559 4982 scope.go:117] "RemoveContainer" containerID="2bbe9551a0d207b6ae2bdc5560028c186d4d64b63562c9988a4790654328c080" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.252485 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vpkcb"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.256572 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.260657 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-6k2jq"] Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.267101 4982 scope.go:117] "RemoveContainer" containerID="2ffcb5185676eb08927773f633416636b72c238ffc8e8ad8d566f3a48ac6ebc4" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.281724 4982 scope.go:117] "RemoveContainer" containerID="895d3d72e532abde4513adbdb44e1bd894af3f6a60696b533531e9fe535e5ce4" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.302397 4982 scope.go:117] "RemoveContainer" containerID="a1d681d6be5132121be6bfa3055a44a3d39fd49d4ad1ccdde31e86265cff01bf" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.320611 4982 scope.go:117] "RemoveContainer" containerID="7b593ba526b4ed2ad40357c3120f7dc085fc071dafe1a825030f9855d8aeb270" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.333049 4982 scope.go:117] "RemoveContainer" containerID="00033f8d46e76911d11888b7f94d3ab7911179aa07c464e83b43ea291302eef4" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.344639 4982 scope.go:117] "RemoveContainer" containerID="5aefd43417f9d9adea4bae5ba678e6cafd8a723bfdcd89c0685f3112e4e06bbd" Dec 05 19:20:24 crc kubenswrapper[4982]: I1205 19:20:24.360140 4982 scope.go:117] "RemoveContainer" containerID="2d30f8cf0b43c7492c5742fd050920e5fb93b28ddb01275368d419404abe2c4d" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.033877 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-lbk8q"] Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034071 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034082 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034092 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034101 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034110 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034118 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034127 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034134 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034156 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034164 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034175 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034181 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034189 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034213 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034227 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034236 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034248 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034254 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034262 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034267 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034276 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034282 
4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034291 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034296 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="extract-content" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034307 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034312 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="extract-utilities" Dec 05 19:20:25 crc kubenswrapper[4982]: E1205 19:20:25.034319 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034325 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034425 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034436 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="10ea7812-4e92-4291-9929-636eccbae790" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034444 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4718733b-932d-413f-9b5c-3c8a773df710" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034453 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2800739-c6ca-495c-a39b-b619242e6867" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034461 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" containerName="marketplace-operator" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.034469 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" containerName="registry-server" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.035129 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.038875 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.049051 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbk8q"] Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.107754 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-utilities\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.107824 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-catalog-content\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.107849 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7x6t\" (UniqueName: \"kubernetes.io/projected/20fee44a-379d-443b-ae53-3d595e7bcdb1-kube-api-access-c7x6t\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.142136 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-n4462" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.209020 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-utilities\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.209279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-catalog-content\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.209326 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7x6t\" (UniqueName: \"kubernetes.io/projected/20fee44a-379d-443b-ae53-3d595e7bcdb1-kube-api-access-c7x6t\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.210884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-utilities\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.213668 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/20fee44a-379d-443b-ae53-3d595e7bcdb1-catalog-content\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.230211 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7x6t\" (UniqueName: \"kubernetes.io/projected/20fee44a-379d-443b-ae53-3d595e7bcdb1-kube-api-access-c7x6t\") pod \"redhat-marketplace-lbk8q\" (UID: \"20fee44a-379d-443b-ae53-3d595e7bcdb1\") " pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.235028 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7llfc"] Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.236173 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.239587 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.246833 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7llfc"] Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.309912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlfx2\" (UniqueName: \"kubernetes.io/projected/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-kube-api-access-mlfx2\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.309968 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-catalog-content\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.309999 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-utilities\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.353181 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.402001 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10ea7812-4e92-4291-9929-636eccbae790" path="/var/lib/kubelet/pods/10ea7812-4e92-4291-9929-636eccbae790/volumes" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.403051 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="335c7642-44cd-41bf-99ac-9c9fcbbe74be" path="/var/lib/kubelet/pods/335c7642-44cd-41bf-99ac-9c9fcbbe74be/volumes" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.404182 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4718733b-932d-413f-9b5c-3c8a773df710" path="/var/lib/kubelet/pods/4718733b-932d-413f-9b5c-3c8a773df710/volumes" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.405878 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2800739-c6ca-495c-a39b-b619242e6867" path="/var/lib/kubelet/pods/c2800739-c6ca-495c-a39b-b619242e6867/volumes" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.406837 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0aef6cf-30ed-4fc5-b84d-585e4692afe9" path="/var/lib/kubelet/pods/f0aef6cf-30ed-4fc5-b84d-585e4692afe9/volumes" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.410764 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-utilities\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.410853 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlfx2\" (UniqueName: \"kubernetes.io/projected/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-kube-api-access-mlfx2\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.410901 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-catalog-content\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.411255 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-utilities\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.414580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-catalog-content\") pod \"redhat-operators-7llfc\" (UID: \"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.431450 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlfx2\" (UniqueName: \"kubernetes.io/projected/e357daf9-c7b6-4ebb-a5a0-0c1046ba7037-kube-api-access-mlfx2\") pod \"redhat-operators-7llfc\" (UID: 
\"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037\") " pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.564174 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.749610 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7llfc"] Dec 05 19:20:25 crc kubenswrapper[4982]: I1205 19:20:25.789120 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-lbk8q"] Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.057563 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbk8q" event={"ID":"20fee44a-379d-443b-ae53-3d595e7bcdb1","Type":"ContainerStarted","Data":"6e235cc7738502d7cbd3f14b701c0972b629f9f8d979eb2dac34ebe53e8e6083"} Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.059903 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7llfc" event={"ID":"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037","Type":"ContainerStarted","Data":"3ba2904c5b51e35f1a8f251d0a3af90a2ad01900ee7ce86123e70fc7d8a296b7"} Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.447857 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-65jpq"] Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.450908 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.453285 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.455776 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-65jpq"] Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.637139 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.639141 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.642388 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.653262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-catalog-content\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.653323 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-utilities\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.653351 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk5ft\" (UniqueName: \"kubernetes.io/projected/2b5c769d-026f-40ae-a15d-c1916e429335-kube-api-access-jk5ft\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.657416 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754704 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754740 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-catalog-content\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754900 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-utilities\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvfvx\" (UniqueName: 
\"kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.754971 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk5ft\" (UniqueName: \"kubernetes.io/projected/2b5c769d-026f-40ae-a15d-c1916e429335-kube-api-access-jk5ft\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.755567 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-catalog-content\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.755604 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2b5c769d-026f-40ae-a15d-c1916e429335-utilities\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.789980 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk5ft\" (UniqueName: \"kubernetes.io/projected/2b5c769d-026f-40ae-a15d-c1916e429335-kube-api-access-jk5ft\") pod \"community-operators-65jpq\" (UID: \"2b5c769d-026f-40ae-a15d-c1916e429335\") " pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.856422 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvfvx\" (UniqueName: \"kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.856511 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.856530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.856951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.857024 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.873511 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvfvx\" (UniqueName: \"kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx\") pod \"certified-operators-dv6hf\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:27 crc kubenswrapper[4982]: I1205 19:20:27.970541 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.072553 4982 generic.go:334] "Generic (PLEG): container finished" podID="20fee44a-379d-443b-ae53-3d595e7bcdb1" containerID="54cbf22b107a80d2092f955a2e39dffd30df0cd50c843f06e8e6e0cfa2295638" exitCode=0 Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.072923 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbk8q" event={"ID":"20fee44a-379d-443b-ae53-3d595e7bcdb1","Type":"ContainerDied","Data":"54cbf22b107a80d2092f955a2e39dffd30df0cd50c843f06e8e6e0cfa2295638"} Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.076361 4982 generic.go:334] "Generic (PLEG): container finished" podID="e357daf9-c7b6-4ebb-a5a0-0c1046ba7037" containerID="5844c9c07c39ec72f6472d5b0e108124920fde87b7b4645bf63610b32f33211b" exitCode=0 Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.076408 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7llfc" event={"ID":"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037","Type":"ContainerDied","Data":"5844c9c07c39ec72f6472d5b0e108124920fde87b7b4645bf63610b32f33211b"} Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.083378 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.211660 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 19:20:28 crc kubenswrapper[4982]: I1205 19:20:28.533011 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-65jpq"] Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.082969 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b5c769d-026f-40ae-a15d-c1916e429335" containerID="ba564fcd24f2382baadffd9861752d05c794ca97d0c893733829bee7c632e0b0" exitCode=0 Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.083108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65jpq" event={"ID":"2b5c769d-026f-40ae-a15d-c1916e429335","Type":"ContainerDied","Data":"ba564fcd24f2382baadffd9861752d05c794ca97d0c893733829bee7c632e0b0"} Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.083446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65jpq" event={"ID":"2b5c769d-026f-40ae-a15d-c1916e429335","Type":"ContainerStarted","Data":"77ca0144de92804282d4d7fa258e773fda348be4aded1f74878a7f7349b53839"} Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.086259 4982 generic.go:334] "Generic (PLEG): container finished" podID="20fee44a-379d-443b-ae53-3d595e7bcdb1" containerID="aa58fdd338253d4dcd4b634ab9cc973b380b3889fb23c29f872eeab86ccfb7ca" exitCode=0 Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.086324 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbk8q" event={"ID":"20fee44a-379d-443b-ae53-3d595e7bcdb1","Type":"ContainerDied","Data":"aa58fdd338253d4dcd4b634ab9cc973b380b3889fb23c29f872eeab86ccfb7ca"} Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.089078 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7llfc" event={"ID":"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037","Type":"ContainerStarted","Data":"0e2d8f207a80806e4f6060d22c1573ad3d92561639bb7649f35ad1b9902e0b6e"} Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.090179 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerID="c8f773fc5d4b591229d6825696489487ebd6671ca057432bb1cab7d28d8567a9" exitCode=0 Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.090201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerDied","Data":"c8f773fc5d4b591229d6825696489487ebd6671ca057432bb1cab7d28d8567a9"} Dec 05 19:20:29 crc kubenswrapper[4982]: I1205 19:20:29.090229 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerStarted","Data":"756bb7cfa8cd75a3e269e6addfb36c99661c39346b9d725177864dc3da43ed15"} Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.097617 4982 generic.go:334] "Generic (PLEG): container finished" podID="e357daf9-c7b6-4ebb-a5a0-0c1046ba7037" containerID="0e2d8f207a80806e4f6060d22c1573ad3d92561639bb7649f35ad1b9902e0b6e" exitCode=0 Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.097695 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7llfc" 
event={"ID":"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037","Type":"ContainerDied","Data":"0e2d8f207a80806e4f6060d22c1573ad3d92561639bb7649f35ad1b9902e0b6e"} Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.107503 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerID="d66d2aa8f6e2f6e2d33c283532487bbcf24bad6cc1bed6afa10c0ec200698fcf" exitCode=0 Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.107565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerDied","Data":"d66d2aa8f6e2f6e2d33c283532487bbcf24bad6cc1bed6afa10c0ec200698fcf"} Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.111380 4982 generic.go:334] "Generic (PLEG): container finished" podID="2b5c769d-026f-40ae-a15d-c1916e429335" containerID="e140239a3c90321802e5c04ee60de578bc5f57b1342dc3b700bd47263fdccf1d" exitCode=0 Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.111464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65jpq" event={"ID":"2b5c769d-026f-40ae-a15d-c1916e429335","Type":"ContainerDied","Data":"e140239a3c90321802e5c04ee60de578bc5f57b1342dc3b700bd47263fdccf1d"} Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.124852 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-lbk8q" event={"ID":"20fee44a-379d-443b-ae53-3d595e7bcdb1","Type":"ContainerStarted","Data":"16c0617fa194d53af4b786a60aab9f27ae8c42c7d5723da6469ef0a13c5803b1"} Dec 05 19:20:30 crc kubenswrapper[4982]: I1205 19:20:30.178293 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-lbk8q" podStartSLOduration=3.782503138 podStartE2EDuration="5.178274058s" podCreationTimestamp="2025-12-05 19:20:25 +0000 UTC" firstStartedPulling="2025-12-05 19:20:28.074836839 +0000 UTC m=+406.956722844" lastFinishedPulling="2025-12-05 19:20:29.470607759 +0000 UTC m=+408.352493764" observedRunningTime="2025-12-05 19:20:30.170856639 +0000 UTC m=+409.052742644" watchObservedRunningTime="2025-12-05 19:20:30.178274058 +0000 UTC m=+409.060160043" Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.131354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-65jpq" event={"ID":"2b5c769d-026f-40ae-a15d-c1916e429335","Type":"ContainerStarted","Data":"36c51de29c4fe3aff7aa8167b170278130f541c224b5520a5f1c8f6409f73ef2"} Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.133703 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7llfc" event={"ID":"e357daf9-c7b6-4ebb-a5a0-0c1046ba7037","Type":"ContainerStarted","Data":"b1fd7fb07d9a3b266c051258fdd688b0d0a9784e9a719f62c72e21aaf969a984"} Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.137535 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerStarted","Data":"021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67"} Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.166321 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-65jpq" podStartSLOduration=2.6977306800000003 podStartE2EDuration="4.166293598s" podCreationTimestamp="2025-12-05 19:20:27 +0000 UTC" 
firstStartedPulling="2025-12-05 19:20:29.084418478 +0000 UTC m=+407.966304473" lastFinishedPulling="2025-12-05 19:20:30.552981396 +0000 UTC m=+409.434867391" observedRunningTime="2025-12-05 19:20:31.149891221 +0000 UTC m=+410.031777216" watchObservedRunningTime="2025-12-05 19:20:31.166293598 +0000 UTC m=+410.048179603" Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.177930 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dv6hf" podStartSLOduration=2.717344018 podStartE2EDuration="4.177910293s" podCreationTimestamp="2025-12-05 19:20:27 +0000 UTC" firstStartedPulling="2025-12-05 19:20:29.091281642 +0000 UTC m=+407.973167637" lastFinishedPulling="2025-12-05 19:20:30.551847897 +0000 UTC m=+409.433733912" observedRunningTime="2025-12-05 19:20:31.175495251 +0000 UTC m=+410.057381246" watchObservedRunningTime="2025-12-05 19:20:31.177910293 +0000 UTC m=+410.059796288" Dec 05 19:20:31 crc kubenswrapper[4982]: I1205 19:20:31.195859 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7llfc" podStartSLOduration=3.754735692 podStartE2EDuration="6.195843458s" podCreationTimestamp="2025-12-05 19:20:25 +0000 UTC" firstStartedPulling="2025-12-05 19:20:28.078069941 +0000 UTC m=+406.959955946" lastFinishedPulling="2025-12-05 19:20:30.519177717 +0000 UTC m=+409.401063712" observedRunningTime="2025-12-05 19:20:31.192411631 +0000 UTC m=+410.074297646" watchObservedRunningTime="2025-12-05 19:20:31.195843458 +0000 UTC m=+410.077729453" Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:34.999647 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" podUID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" containerName="registry" containerID="cri-o://66fb24faa1618672ae2e998bc75da2e53cddffe1ffd4996da8cb70b1ed6aa5cc" gracePeriod=30 Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.159594 4982 generic.go:334] "Generic (PLEG): container finished" podID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" containerID="66fb24faa1618672ae2e998bc75da2e53cddffe1ffd4996da8cb70b1ed6aa5cc" exitCode=0 Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.159632 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" event={"ID":"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee","Type":"ContainerDied","Data":"66fb24faa1618672ae2e998bc75da2e53cddffe1ffd4996da8cb70b1ed6aa5cc"} Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.354348 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.354418 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.405038 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.566512 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:35 crc kubenswrapper[4982]: I1205 19:20:35.566562 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.078984 4982 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169397 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169490 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169539 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169735 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.169787 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zphfp\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp\") pod \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\" (UID: \"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee\") " Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.175894 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.183039 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.187387 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp" (OuterVolumeSpecName: "kube-api-access-zphfp") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "kube-api-access-zphfp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.188179 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" event={"ID":"c2abf723-29b6-4ce2-b7e1-d5484b0c69ee","Type":"ContainerDied","Data":"c823051d56f0588ade7bb2535f4f83e8ca4b5c65ee56b7488155709d1c28087c"} Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.188232 4982 scope.go:117] "RemoveContainer" containerID="66fb24faa1618672ae2e998bc75da2e53cddffe1ffd4996da8cb70b1ed6aa5cc" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.188836 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-z7tw9" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.190928 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.191190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.198260 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.198517 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.199505 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" (UID: "c2abf723-29b6-4ce2-b7e1-d5484b0c69ee"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.253537 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-lbk8q" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271709 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zphfp\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-kube-api-access-zphfp\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271749 4982 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271764 4982 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271775 4982 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271787 4982 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271798 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.271808 4982 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.532674 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"] Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.544115 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-z7tw9"] Dec 05 19:20:36 crc kubenswrapper[4982]: I1205 19:20:36.623572 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7llfc" podUID="e357daf9-c7b6-4ebb-a5a0-0c1046ba7037" containerName="registry-server" probeResult="failure" output=< Dec 05 19:20:36 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:20:36 crc kubenswrapper[4982]: > Dec 05 19:20:37 crc kubenswrapper[4982]: I1205 19:20:37.398203 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" path="/var/lib/kubelet/pods/c2abf723-29b6-4ce2-b7e1-d5484b0c69ee/volumes" Dec 05 19:20:37 crc kubenswrapper[4982]: I1205 19:20:37.971218 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:37 crc kubenswrapper[4982]: I1205 19:20:37.971543 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.015994 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.084786 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.085633 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.141949 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.248054 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-65jpq" Dec 05 19:20:38 crc kubenswrapper[4982]: I1205 19:20:38.256903 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 19:20:42 crc kubenswrapper[4982]: I1205 19:20:42.557519 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:20:42 crc kubenswrapper[4982]: I1205 19:20:42.558818 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:20:42 crc kubenswrapper[4982]: I1205 19:20:42.558963 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:20:42 crc kubenswrapper[4982]: I1205 19:20:42.559668 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:20:42 crc kubenswrapper[4982]: I1205 19:20:42.559830 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4" gracePeriod=600 Dec 05 19:20:45 crc kubenswrapper[4982]: I1205 19:20:45.232832 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4" exitCode=0 Dec 05 19:20:45 crc kubenswrapper[4982]: I1205 19:20:45.232918 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4"} Dec 05 19:20:45 crc kubenswrapper[4982]: I1205 19:20:45.233268 4982 scope.go:117] "RemoveContainer" containerID="0497f7a1d0b9228ddfba815b1866240af412e25d2b3971a7b6c90352064997a2" Dec 05 19:20:45 crc kubenswrapper[4982]: I1205 19:20:45.615681 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:45 crc kubenswrapper[4982]: I1205 19:20:45.665174 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7llfc" Dec 05 19:20:46 crc kubenswrapper[4982]: I1205 19:20:46.241980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66"} Dec 05 19:22:41 crc kubenswrapper[4982]: I1205 19:22:41.603810 4982 scope.go:117] "RemoveContainer" containerID="f82e160ec5a40fde0b658f19d15696323a6963bf4c4428bbd80cf60884b41d89" Dec 05 19:23:12 crc kubenswrapper[4982]: I1205 19:23:12.557036 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:23:12 crc kubenswrapper[4982]: I1205 19:23:12.557737 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:23:41 crc kubenswrapper[4982]: I1205 19:23:41.654955 4982 scope.go:117] "RemoveContainer" containerID="4d9393563bf6bcf82c847612352d75f11a292c1c43c8651253d20bdec9211c19" Dec 05 19:23:41 crc kubenswrapper[4982]: I1205 19:23:41.695542 4982 scope.go:117] "RemoveContainer" containerID="caab3dc6cf348f0a491612c6d71fc1d36f61b185451cf186823f4d56b36d9f45" Dec 05 19:23:42 crc kubenswrapper[4982]: I1205 19:23:42.557277 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:23:42 crc kubenswrapper[4982]: I1205 19:23:42.557634 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:24:12 crc kubenswrapper[4982]: I1205 19:24:12.557534 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:24:12 crc kubenswrapper[4982]: I1205 19:24:12.558178 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:24:12 crc kubenswrapper[4982]: I1205 19:24:12.558402 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:24:12 crc kubenswrapper[4982]: I1205 19:24:12.559552 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:24:12 crc kubenswrapper[4982]: I1205 19:24:12.559665 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66" gracePeriod=600 Dec 05 19:24:13 crc kubenswrapper[4982]: I1205 19:24:13.682115 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66" exitCode=0 Dec 05 19:24:13 crc kubenswrapper[4982]: I1205 19:24:13.682196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66"} Dec 05 19:24:13 crc kubenswrapper[4982]: I1205 19:24:13.682625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13"} Dec 05 19:24:13 crc kubenswrapper[4982]: I1205 19:24:13.682662 4982 scope.go:117] "RemoveContainer" containerID="895fd01052264be16f34513524aa7f1a8429b4edeb2f6b2f6fd3e84819765ac4" Dec 05 19:24:41 crc kubenswrapper[4982]: I1205 19:24:41.738522 4982 scope.go:117] "RemoveContainer" containerID="b85048a00a069ca23572a81a47f00737f9616bd6272d359d31e5886632a2f494" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.325450 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf"] Dec 05 19:25:40 crc kubenswrapper[4982]: E1205 19:25:40.327231 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" containerName="registry" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.327338 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" containerName="registry" Dec 05 19:25:40 crc 
kubenswrapper[4982]: I1205 19:25:40.327540 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2abf723-29b6-4ce2-b7e1-d5484b0c69ee" containerName="registry" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.328501 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.330626 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.334708 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf"] Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.404886 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klk54\" (UniqueName: \"kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.404944 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.405024 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.506023 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klk54\" (UniqueName: \"kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.506093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.506237 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.506615 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.506813 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.523770 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klk54\" (UniqueName: \"kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.654237 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:40 crc kubenswrapper[4982]: I1205 19:25:40.827801 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf"] Dec 05 19:25:41 crc kubenswrapper[4982]: I1205 19:25:41.263511 4982 generic.go:334] "Generic (PLEG): container finished" podID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerID="a065e160810ca1422db5e161acab02922e38b75dc916e2a35971f52b220e2ac1" exitCode=0 Dec 05 19:25:41 crc kubenswrapper[4982]: I1205 19:25:41.263613 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" event={"ID":"e2e84dc1-b99f-497b-9ff4-d0502756b48c","Type":"ContainerDied","Data":"a065e160810ca1422db5e161acab02922e38b75dc916e2a35971f52b220e2ac1"} Dec 05 19:25:41 crc kubenswrapper[4982]: I1205 19:25:41.263742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" event={"ID":"e2e84dc1-b99f-497b-9ff4-d0502756b48c","Type":"ContainerStarted","Data":"87d8b056474f53db596b8bfd8ab2ec486c014a44d11c2c804c6fadd4d679a38d"} Dec 05 19:25:41 crc kubenswrapper[4982]: I1205 19:25:41.266239 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:25:43 crc kubenswrapper[4982]: I1205 19:25:43.284892 4982 generic.go:334] "Generic (PLEG): container finished" podID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerID="52a8d368b8654fc5684042fe4d73847ba7ef764be0f898db92ad48770660a9d9" exitCode=0 Dec 05 19:25:43 crc kubenswrapper[4982]: I1205 19:25:43.285252 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" 
event={"ID":"e2e84dc1-b99f-497b-9ff4-d0502756b48c","Type":"ContainerDied","Data":"52a8d368b8654fc5684042fe4d73847ba7ef764be0f898db92ad48770660a9d9"} Dec 05 19:25:44 crc kubenswrapper[4982]: I1205 19:25:44.297794 4982 generic.go:334] "Generic (PLEG): container finished" podID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerID="6c1df467c0aafcb22a2a3fa001381e5555df324a593f90b0e0cc84049b23d0b7" exitCode=0 Dec 05 19:25:44 crc kubenswrapper[4982]: I1205 19:25:44.297885 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" event={"ID":"e2e84dc1-b99f-497b-9ff4-d0502756b48c","Type":"ContainerDied","Data":"6c1df467c0aafcb22a2a3fa001381e5555df324a593f90b0e0cc84049b23d0b7"} Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.563910 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.766131 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util\") pod \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.766567 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klk54\" (UniqueName: \"kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54\") pod \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.766636 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle\") pod \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\" (UID: \"e2e84dc1-b99f-497b-9ff4-d0502756b48c\") " Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.771259 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle" (OuterVolumeSpecName: "bundle") pod "e2e84dc1-b99f-497b-9ff4-d0502756b48c" (UID: "e2e84dc1-b99f-497b-9ff4-d0502756b48c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.776122 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54" (OuterVolumeSpecName: "kube-api-access-klk54") pod "e2e84dc1-b99f-497b-9ff4-d0502756b48c" (UID: "e2e84dc1-b99f-497b-9ff4-d0502756b48c"). InnerVolumeSpecName "kube-api-access-klk54". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.782442 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util" (OuterVolumeSpecName: "util") pod "e2e84dc1-b99f-497b-9ff4-d0502756b48c" (UID: "e2e84dc1-b99f-497b-9ff4-d0502756b48c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.868907 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-util\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.869127 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klk54\" (UniqueName: \"kubernetes.io/projected/e2e84dc1-b99f-497b-9ff4-d0502756b48c-kube-api-access-klk54\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:45 crc kubenswrapper[4982]: I1205 19:25:45.869203 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e2e84dc1-b99f-497b-9ff4-d0502756b48c-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:46 crc kubenswrapper[4982]: I1205 19:25:46.322409 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" event={"ID":"e2e84dc1-b99f-497b-9ff4-d0502756b48c","Type":"ContainerDied","Data":"87d8b056474f53db596b8bfd8ab2ec486c014a44d11c2c804c6fadd4d679a38d"} Dec 05 19:25:46 crc kubenswrapper[4982]: I1205 19:25:46.322463 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87d8b056474f53db596b8bfd8ab2ec486c014a44d11c2c804c6fadd4d679a38d" Dec 05 19:25:46 crc kubenswrapper[4982]: I1205 19:25:46.322549 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf" Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.795747 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xlt6h"] Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.797480 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-controller" containerID="cri-o://f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.797834 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="northd" containerID="cri-o://304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.797994 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="nbdb" containerID="cri-o://d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.797970 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="sbdb" containerID="cri-o://cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.798115 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-ovn-metrics" 
containerID="cri-o://1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.798123 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-node" containerID="cri-o://3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.798231 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-acl-logging" containerID="cri-o://47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3" gracePeriod=30 Dec 05 19:25:51 crc kubenswrapper[4982]: I1205 19:25:51.839200 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" containerID="cri-o://038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" gracePeriod=30 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.133036 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/3.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.135478 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovn-acl-logging/0.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.135930 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovn-controller/0.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.136320 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204105 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-f8bs9"] Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204342 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="sbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204358 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="sbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204370 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204377 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204388 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204396 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204403 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204411 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204424 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="nbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204432 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="nbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204439 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="pull" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204446 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="pull" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204458 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="northd" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204465 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="northd" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204473 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="extract" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204480 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="extract" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204492 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="util" Dec 05 19:25:52 crc 
kubenswrapper[4982]: I1205 19:25:52.204499 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="util" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204511 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204519 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204528 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kubecfg-setup" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204536 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kubecfg-setup" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204545 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-acl-logging" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204553 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-acl-logging" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204561 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-node" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204570 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-node" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204580 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204589 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204718 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-node" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204731 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204741 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204750 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204758 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovn-acl-logging" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204766 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="northd" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204775 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204785 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e84dc1-b99f-497b-9ff4-d0502756b48c" containerName="extract" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204793 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="nbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204806 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204814 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204822 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="sbdb" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204953 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204963 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.204972 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.204980 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.205090 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerName="ovnkube-controller" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.206872 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229190 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229280 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229327 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229352 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229344 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash" (OuterVolumeSpecName: "host-slash") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229393 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbkcn\" (UniqueName: \"kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229416 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229423 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229429 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). 
InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229451 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229477 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229488 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229448 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229510 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229556 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229605 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229602 4982 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229640 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229663 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229688 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229709 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229768 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229788 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket\") pod \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\" (UID: \"511e6b4b-3bb0-4288-9e2d-2d21485ef74c\") " Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229930 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229967 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket" (OuterVolumeSpecName: "log-socket") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.229965 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230002 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230020 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230055 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230073 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230093 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log" (OuterVolumeSpecName: "node-log") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230107 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230169 4982 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230197 4982 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230210 4982 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230223 4982 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230238 4982 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230250 4982 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230261 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230273 4982 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230285 4982 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230296 4982 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.230353 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). 
InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.244325 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn" (OuterVolumeSpecName: "kube-api-access-sbkcn") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "kube-api-access-sbkcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.245871 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.255666 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "511e6b4b-3bb0-4288-9e2d-2d21485ef74c" (UID: "511e6b4b-3bb0-4288-9e2d-2d21485ef74c"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330847 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-netns\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330896 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-script-lib\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330916 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-kubelet\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330935 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-bin\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330955 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-log-socket\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330975 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-etc-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.330992 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331006 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-netd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331023 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331063 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k75d7\" (UniqueName: \"kubernetes.io/projected/a5664f01-db6b-464e-b3af-334b02ef6564-kube-api-access-k75d7\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331080 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-systemd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331103 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-node-log\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331125 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a5664f01-db6b-464e-b3af-334b02ef6564-ovn-node-metrics-cert\") pod \"ovnkube-node-f8bs9\" (UID: 
\"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331165 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-ovn\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331184 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-slash\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331199 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-systemd-units\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331213 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-var-lib-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331228 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-config\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331242 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-env-overrides\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331279 4982 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331291 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331301 4982 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331309 4982 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331317 4982 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331325 4982 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331334 4982 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331342 4982 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331350 4982 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.331359 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbkcn\" (UniqueName: \"kubernetes.io/projected/511e6b4b-3bb0-4288-9e2d-2d21485ef74c-kube-api-access-sbkcn\") on node \"crc\" DevicePath \"\"" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.358285 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/2.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.358659 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/1.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.358690 4982 generic.go:334] "Generic (PLEG): container finished" podID="62e7c34f-d411-481e-a5bb-885e7cbd4326" containerID="3524cbce9c0eb2e2fc04acf3c6d9153a434e42cf73e554bfa8a53608ce416b87" exitCode=2 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.358730 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerDied","Data":"3524cbce9c0eb2e2fc04acf3c6d9153a434e42cf73e554bfa8a53608ce416b87"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.358763 4982 scope.go:117] "RemoveContainer" containerID="2a8d874cf486b2c543493d14189c417e5514f5a9af33741f08b0993528368bbb" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.359121 4982 scope.go:117] "RemoveContainer" containerID="3524cbce9c0eb2e2fc04acf3c6d9153a434e42cf73e554bfa8a53608ce416b87" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.359387 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8ldph_openshift-multus(62e7c34f-d411-481e-a5bb-885e7cbd4326)\"" pod="openshift-multus/multus-8ldph" podUID="62e7c34f-d411-481e-a5bb-885e7cbd4326" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.362708 4982 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovnkube-controller/3.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.364602 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovn-acl-logging/0.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365005 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-xlt6h_511e6b4b-3bb0-4288-9e2d-2d21485ef74c/ovn-controller/0.log" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365349 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365370 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365377 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365383 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365390 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365396 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee" exitCode=0 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365403 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3" exitCode=143 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365410 4982 generic.go:334] "Generic (PLEG): container finished" podID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" containerID="f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e" exitCode=143 Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365430 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365455 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365467 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" 
event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365476 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365485 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365496 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365491 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365507 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365517 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365523 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365529 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365535 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365540 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365545 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365550 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365554 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:25:52 
crc kubenswrapper[4982]: I1205 19:25:52.365559 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365573 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365578 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365584 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365588 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365593 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365598 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365603 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365607 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365612 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365617 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365630 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365637 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365643 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365648 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365653 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365657 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365664 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365668 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365673 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365679 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-xlt6h" event={"ID":"511e6b4b-3bb0-4288-9e2d-2d21485ef74c","Type":"ContainerDied","Data":"4279575fcbf4c051e9e722e9f79ed5d62fb0f1c0c207ff62518ac7c3cc8f1417"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365692 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365699 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365705 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365710 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365715 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365720 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365725 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365729 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365734 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.365739 4982 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.382757 4982 scope.go:117] "RemoveContainer" containerID="038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.396043 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.416018 4982 scope.go:117] "RemoveContainer" containerID="cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.432290 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-ovn\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433286 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-ovn\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433343 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-slash\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-var-lib-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: 
\"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433385 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-systemd-units\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433402 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-env-overrides\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433418 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-config\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433438 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-var-lib-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-netns\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433478 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-netns\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433493 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-script-lib\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433469 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-slash\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433506 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-systemd-units\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc 
kubenswrapper[4982]: I1205 19:25:52.433534 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-kubelet\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433518 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-kubelet\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433585 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-bin\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433621 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-log-socket\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433655 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-etc-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433680 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433703 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-netd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433736 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433766 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433810 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k75d7\" (UniqueName: \"kubernetes.io/projected/a5664f01-db6b-464e-b3af-334b02ef6564-kube-api-access-k75d7\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433832 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-systemd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433884 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-node-log\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.433937 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a5664f01-db6b-464e-b3af-334b02ef6564-ovn-node-metrics-cert\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-env-overrides\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434073 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-config\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-netd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434109 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-cni-bin\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434131 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-etc-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434138 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-run-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434188 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-log-socket\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434214 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-openvswitch\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434235 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a5664f01-db6b-464e-b3af-334b02ef6564-ovnkube-script-lib\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434257 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-run-systemd\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.434273 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a5664f01-db6b-464e-b3af-334b02ef6564-node-log\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.437792 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a5664f01-db6b-464e-b3af-334b02ef6564-ovn-node-metrics-cert\") pod \"ovnkube-node-f8bs9\" (UID: \"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.438356 4982 scope.go:117] "RemoveContainer" containerID="d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.449816 4982 scope.go:117] "RemoveContainer" containerID="304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.461889 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k75d7\" (UniqueName: \"kubernetes.io/projected/a5664f01-db6b-464e-b3af-334b02ef6564-kube-api-access-k75d7\") pod \"ovnkube-node-f8bs9\" (UID: 
\"a5664f01-db6b-464e-b3af-334b02ef6564\") " pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.473465 4982 scope.go:117] "RemoveContainer" containerID="1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.474549 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xlt6h"] Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.481400 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-xlt6h"] Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.488240 4982 scope.go:117] "RemoveContainer" containerID="3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.504140 4982 scope.go:117] "RemoveContainer" containerID="47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.520410 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.526329 4982 scope.go:117] "RemoveContainer" containerID="f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.553000 4982 scope.go:117] "RemoveContainer" containerID="2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.566513 4982 scope.go:117] "RemoveContainer" containerID="038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.567198 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f\": container with ID starting with 038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f not found: ID does not exist" containerID="038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.567228 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} err="failed to get container status \"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f\": rpc error: code = NotFound desc = could not find container \"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f\": container with ID starting with 038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.567247 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.569068 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\": container with ID starting with 44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682 not found: ID does not exist" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.569109 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} err="failed to get container status \"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\": rpc error: code = NotFound desc = could not find container \"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\": container with ID starting with 44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.569132 4982 scope.go:117] "RemoveContainer" containerID="cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.569601 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\": container with ID starting with cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878 not found: ID does not exist" containerID="cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.569647 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} err="failed to get container status \"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\": rpc error: code = NotFound desc = could not find container \"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\": container with ID starting with cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.569684 4982 scope.go:117] "RemoveContainer" containerID="d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.569951 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\": container with ID starting with d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc not found: ID does not exist" containerID="d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.569985 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} err="failed to get container status \"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\": rpc error: code = NotFound desc = could not find container \"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\": container with ID starting with d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570002 4982 scope.go:117] "RemoveContainer" containerID="304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.570267 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\": container with ID starting with 304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203 not found: ID does not exist" 
containerID="304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570298 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203"} err="failed to get container status \"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\": rpc error: code = NotFound desc = could not find container \"304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203\": container with ID starting with 304b90f0942b38c3df36776146e25835ea265191a3cdf217bdabd0f9a130b203 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570317 4982 scope.go:117] "RemoveContainer" containerID="1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.570528 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\": container with ID starting with 1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08 not found: ID does not exist" containerID="1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570555 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08"} err="failed to get container status \"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\": rpc error: code = NotFound desc = could not find container \"1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08\": container with ID starting with 1cb29ebe4d301b864db0c4a5f93852604ad6054fc60ab965769f44841ee8bf08 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570572 4982 scope.go:117] "RemoveContainer" containerID="3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.570792 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\": container with ID starting with 3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee not found: ID does not exist" containerID="3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570825 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee"} err="failed to get container status \"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\": rpc error: code = NotFound desc = could not find container \"3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee\": container with ID starting with 3568fe5c40d33f888a8bc6b40da6b59fe03bdefa270c58d4a76fe8c38bcd8bee not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.570842 4982 scope.go:117] "RemoveContainer" containerID="47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.571025 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\": container with ID starting with 47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3 not found: ID does not exist" containerID="47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571050 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3"} err="failed to get container status \"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\": rpc error: code = NotFound desc = could not find container \"47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3\": container with ID starting with 47b19de56af97e49e0d990adbe51a05a141e361bf33978ea4c79640306d293d3 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571067 4982 scope.go:117] "RemoveContainer" containerID="f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.571256 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\": container with ID starting with f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e not found: ID does not exist" containerID="f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571286 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e"} err="failed to get container status \"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\": rpc error: code = NotFound desc = could not find container \"f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e\": container with ID starting with f6a81df4267e49cc9f1d001ab95a158aeb7a98f2ad1d39aa592c8f160194ed5e not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571303 4982 scope.go:117] "RemoveContainer" containerID="2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3" Dec 05 19:25:52 crc kubenswrapper[4982]: E1205 19:25:52.571459 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\": container with ID starting with 2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3 not found: ID does not exist" containerID="2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571483 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3"} err="failed to get container status \"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\": rpc error: code = NotFound desc = could not find container \"2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3\": container with ID starting with 2cfa4fbcb4a7ec7a0b96d7c39f75284d0b57af11227b8beeaf9a048494ccefc3 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571500 4982 scope.go:117] "RemoveContainer" containerID="038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f" Dec 05 19:25:52 crc 
kubenswrapper[4982]: I1205 19:25:52.571671 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f"} err="failed to get container status \"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f\": rpc error: code = NotFound desc = could not find container \"038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f\": container with ID starting with 038e94ce6270170c647c5af12c8cbd989ed8066a3cf1a78fa9be1009af133a7f not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571697 4982 scope.go:117] "RemoveContainer" containerID="44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571870 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682"} err="failed to get container status \"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\": rpc error: code = NotFound desc = could not find container \"44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682\": container with ID starting with 44f6731f21604ed943aa2c79e26cc1bc8f7944ea358a399276c3923ea0de9682 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.571894 4982 scope.go:117] "RemoveContainer" containerID="cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.572071 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878"} err="failed to get container status \"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\": rpc error: code = NotFound desc = could not find container \"cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878\": container with ID starting with cfd89fdf87bcfa6a677e89579adbfbd0a8dea66c75687bcabb628dbc08bc6878 not found: ID does not exist" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.572093 4982 scope.go:117] "RemoveContainer" containerID="d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc" Dec 05 19:25:52 crc kubenswrapper[4982]: I1205 19:25:52.572260 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc"} err="failed to get container status \"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\": rpc error: code = NotFound desc = could not find container \"d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc\": container with ID starting with d935cbc224e62a27f4de44a08b0efec334f7c0f2238399752b0dca83272ad3fc not found: ID does not exist"
Dec 05 19:25:53 crc kubenswrapper[4982]: I1205 19:25:53.370928 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/2.log" Dec 05 19:25:53 crc kubenswrapper[4982]: I1205 19:25:53.372800 4982 generic.go:334] "Generic (PLEG): container finished" podID="a5664f01-db6b-464e-b3af-334b02ef6564" containerID="fe1897f15d0882d0d03b49a1f1d27199d050f8096e6934741f8fece019c39c51" exitCode=0 Dec 05 19:25:53 crc kubenswrapper[4982]: I1205 19:25:53.372832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerDied","Data":"fe1897f15d0882d0d03b49a1f1d27199d050f8096e6934741f8fece019c39c51"} Dec 05 19:25:53 crc kubenswrapper[4982]: I1205 19:25:53.372851 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"87f5dc922a8097ad5765e391c69a6ef63b32c472875aa1e414b765ad725d968c"} Dec 05 19:25:53 crc kubenswrapper[4982]: I1205 19:25:53.398114 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="511e6b4b-3bb0-4288-9e2d-2d21485ef74c" path="/var/lib/kubelet/pods/511e6b4b-3bb0-4288-9e2d-2d21485ef74c/volumes" Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380236 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"1d3331155cbbbcf42d7fcec7d9b6766b3999c9b6af968204711cd42f5e24810c"} Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380670 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"335d9b7ccb44958fb1e0dc831692bb286f0ca38a69f8f352ae6825f7e13a8840"} Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"74b8923343308df2f2d0212847aa34a5e78f8a2e32732ff52159732b7ce88b84"} Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380691 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"bfad1466cec6c5924589a9e7dcbf433d7126c5bcd40720d69640c60c635a262c"} Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380700 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"8a27cabdd13a9d331376b2ef478f504acd75648fcc23dec8736fdb70f282c57c"} Dec 05 19:25:54 crc kubenswrapper[4982]: I1205 19:25:54.380730 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"0709f84123425a589277205e00947147e18221d76e2a12f485d65f4342156528"} Dec 05 19:25:57 crc kubenswrapper[4982]: I1205 19:25:57.399027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"bce03ce86e1aaea595ad3232cc864daabb7ae3aaf87b077c2db5653443b4ba9c"} Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.117870 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"] Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.118502 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.121370 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.121491 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-psrhz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.121495 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.201010 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xzlg\" (UniqueName: \"kubernetes.io/projected/e2c8beeb-010c-4aac-b407-981a15acaee9-kube-api-access-5xzlg\") pod \"obo-prometheus-operator-668cf9dfbb-5mhck\" (UID: \"e2c8beeb-010c-4aac-b407-981a15acaee9\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.240710 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"] Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.241499 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.243294 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-bjtgr" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.243843 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.256749 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"] Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.259568 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.302476 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xzlg\" (UniqueName: \"kubernetes.io/projected/e2c8beeb-010c-4aac-b407-981a15acaee9-kube-api-access-5xzlg\") pod \"obo-prometheus-operator-668cf9dfbb-5mhck\" (UID: \"e2c8beeb-010c-4aac-b407-981a15acaee9\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.325068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xzlg\" (UniqueName: \"kubernetes.io/projected/e2c8beeb-010c-4aac-b407-981a15acaee9-kube-api-access-5xzlg\") pod \"obo-prometheus-operator-668cf9dfbb-5mhck\" (UID: \"e2c8beeb-010c-4aac-b407-981a15acaee9\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.346294 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-nwfp4"] Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.347054 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.348693 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-7jthg" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.349500 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.403303 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.403384 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.403404 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.403487 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.434363 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.446311 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lz5fb"] Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.447512 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.449720 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-jpkcp" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.461699 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(e1f96776bec2bbd04e8b3f4c2b9526ab76c6a323950833db63bd2b58ab08477d): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.461773 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(e1f96776bec2bbd04e8b3f4c2b9526ab76c6a323950833db63bd2b58ab08477d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.461798 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(e1f96776bec2bbd04e8b3f4c2b9526ab76c6a323950833db63bd2b58ab08477d): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.461857 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators(e2c8beeb-010c-4aac-b407-981a15acaee9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators(e2c8beeb-010c-4aac-b407-981a15acaee9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(e1f96776bec2bbd04e8b3f4c2b9526ab76c6a323950833db63bd2b58ab08477d): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" podUID="e2c8beeb-010c-4aac-b407-981a15acaee9" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.505760 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.505805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.505892 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktbxs\" (UniqueName: \"kubernetes.io/projected/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-kube-api-access-ktbxs\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.505913 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.505931 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.506002 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.523902 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.526670 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/1624c766-4ba0-48bf-a7b7-4a8322251e2e-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw\" (UID: \"1624c766-4ba0-48bf-a7b7-4a8322251e2e\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.527666 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.527706 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/fe591891-51d5-49c8-880a-213703150e27-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz\" (UID: \"fe591891-51d5-49c8-880a-213703150e27\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.566225 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.585139 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.611217 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.611629 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktbxs\" (UniqueName: \"kubernetes.io/projected/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-kube-api-access-ktbxs\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.611731 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjgqq\" (UniqueName: \"kubernetes.io/projected/15088f20-542f-426d-9e0f-cfb52b660483-kube-api-access-bjgqq\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.612852 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/15088f20-542f-426d-9e0f-cfb52b660483-openshift-service-ca\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.621430 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create 
pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(16ae4b814dbb38097c376295f0c5aa093b63af1a4c28453d2e80cd7d846673c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.621518 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(16ae4b814dbb38097c376295f0c5aa093b63af1a4c28453d2e80cd7d846673c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.621544 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(16ae4b814dbb38097c376295f0c5aa093b63af1a4c28453d2e80cd7d846673c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.621595 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators(1624c766-4ba0-48bf-a7b7-4a8322251e2e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators(1624c766-4ba0-48bf-a7b7-4a8322251e2e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(16ae4b814dbb38097c376295f0c5aa093b63af1a4c28453d2e80cd7d846673c2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" podUID="1624c766-4ba0-48bf-a7b7-4a8322251e2e" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.622171 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.627253 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(ad83f66af77ea5af2bad9df1db60e9223aa8683869581e0ac9111ee9d226c98c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.627313 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(ad83f66af77ea5af2bad9df1db60e9223aa8683869581e0ac9111ee9d226c98c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.627333 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(ad83f66af77ea5af2bad9df1db60e9223aa8683869581e0ac9111ee9d226c98c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.627374 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators(fe591891-51d5-49c8-880a-213703150e27)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators(fe591891-51d5-49c8-880a-213703150e27)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(ad83f66af77ea5af2bad9df1db60e9223aa8683869581e0ac9111ee9d226c98c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" podUID="fe591891-51d5-49c8-880a-213703150e27" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.628857 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktbxs\" (UniqueName: \"kubernetes.io/projected/abe3cbe3-f02e-4fc5-81e5-cb02da29d18b-kube-api-access-ktbxs\") pod \"observability-operator-d8bb48f5d-nwfp4\" (UID: \"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b\") " pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.666845 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.690215 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(1b4a00fe967acdb3c2e48ef8eb22cb0c075ff8d7fa9f6edbf278ad34914199d4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.690295 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(1b4a00fe967acdb3c2e48ef8eb22cb0c075ff8d7fa9f6edbf278ad34914199d4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.690323 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(1b4a00fe967acdb3c2e48ef8eb22cb0c075ff8d7fa9f6edbf278ad34914199d4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.690373 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-nwfp4_openshift-operators(abe3cbe3-f02e-4fc5-81e5-cb02da29d18b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-nwfp4_openshift-operators(abe3cbe3-f02e-4fc5-81e5-cb02da29d18b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(1b4a00fe967acdb3c2e48ef8eb22cb0c075ff8d7fa9f6edbf278ad34914199d4): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" podUID="abe3cbe3-f02e-4fc5-81e5-cb02da29d18b"
Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.714088 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjgqq\" (UniqueName: \"kubernetes.io/projected/15088f20-542f-426d-9e0f-cfb52b660483-kube-api-access-bjgqq\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.714168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/15088f20-542f-426d-9e0f-cfb52b660483-openshift-service-ca\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.714952 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/15088f20-542f-426d-9e0f-cfb52b660483-openshift-service-ca\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.732053 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjgqq\" (UniqueName: \"kubernetes.io/projected/15088f20-542f-426d-9e0f-cfb52b660483-kube-api-access-bjgqq\") pod \"perses-operator-5446b9c989-lz5fb\" (UID: \"15088f20-542f-426d-9e0f-cfb52b660483\") " pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: I1205 19:25:58.795055 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.814776 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(da70654798f176919755e7e4c7387e530cf12ba080b9f27b0e7250da937edc56): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.814844 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(da70654798f176919755e7e4c7387e530cf12ba080b9f27b0e7250da937edc56): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.814869 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(da70654798f176919755e7e4c7387e530cf12ba080b9f27b0e7250da937edc56): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:58 crc kubenswrapper[4982]: E1205 19:25:58.814929 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-lz5fb_openshift-operators(15088f20-542f-426d-9e0f-cfb52b660483)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-lz5fb_openshift-operators(15088f20-542f-426d-9e0f-cfb52b660483)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(da70654798f176919755e7e4c7387e530cf12ba080b9f27b0e7250da937edc56): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" podUID="15088f20-542f-426d-9e0f-cfb52b660483"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.411681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" event={"ID":"a5664f01-db6b-464e-b3af-334b02ef6564","Type":"ContainerStarted","Data":"dbd25269c6b16bc2a44741fbd9110bd630c4d5b1750c26e9b077904d5fdffb89"}
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.412053 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.412076 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.437623 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9" podStartSLOduration=7.437607229 podStartE2EDuration="7.437607229s" podCreationTimestamp="2025-12-05 19:25:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:25:59.436428489 +0000 UTC m=+738.318314494" watchObservedRunningTime="2025-12-05 19:25:59.437607229 +0000 UTC m=+738.319493224"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.445037 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.488384 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"]
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.488480 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.488898 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.492740 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"]
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.492856 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.493311 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.497415 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-nwfp4"]
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.497527 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.497957 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.511361 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lz5fb"]
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.511450 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.511793 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.531027 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(3293d1a647ef63006e4ae28a1e29b71079dbbd2828e560a55cdef46a64ec3394): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.531103 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(3293d1a647ef63006e4ae28a1e29b71079dbbd2828e560a55cdef46a64ec3394): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.531130 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(3293d1a647ef63006e4ae28a1e29b71079dbbd2828e560a55cdef46a64ec3394): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.531197 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators(1624c766-4ba0-48bf-a7b7-4a8322251e2e)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators(1624c766-4ba0-48bf-a7b7-4a8322251e2e)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_openshift-operators_1624c766-4ba0-48bf-a7b7-4a8322251e2e_0(3293d1a647ef63006e4ae28a1e29b71079dbbd2828e560a55cdef46a64ec3394): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" podUID="1624c766-4ba0-48bf-a7b7-4a8322251e2e"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.554601 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(5aaf9dee0aedcd11c1a3a0258f80199f2cab1d2bfe0625269763be13a5d650e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.554660 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(5aaf9dee0aedcd11c1a3a0258f80199f2cab1d2bfe0625269763be13a5d650e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.554679 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(5aaf9dee0aedcd11c1a3a0258f80199f2cab1d2bfe0625269763be13a5d650e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.554716 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators(fe591891-51d5-49c8-880a-213703150e27)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators(fe591891-51d5-49c8-880a-213703150e27)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_openshift-operators_fe591891-51d5-49c8-880a-213703150e27_0(5aaf9dee0aedcd11c1a3a0258f80199f2cab1d2bfe0625269763be13a5d650e4): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" podUID="fe591891-51d5-49c8-880a-213703150e27"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.563201 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"]
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.563319 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:25:59 crc kubenswrapper[4982]: I1205 19:25:59.563691 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.576926 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(23b9813824c1585fb1a4d8be186c0735ad9d95e5cf5789969f7bba6e9a85e4e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.576985 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(23b9813824c1585fb1a4d8be186c0735ad9d95e5cf5789969f7bba6e9a85e4e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.577003 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(23b9813824c1585fb1a4d8be186c0735ad9d95e5cf5789969f7bba6e9a85e4e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.577044 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-nwfp4_openshift-operators(abe3cbe3-f02e-4fc5-81e5-cb02da29d18b)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-nwfp4_openshift-operators(abe3cbe3-f02e-4fc5-81e5-cb02da29d18b)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-nwfp4_openshift-operators_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b_0(23b9813824c1585fb1a4d8be186c0735ad9d95e5cf5789969f7bba6e9a85e4e6): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" podUID="abe3cbe3-f02e-4fc5-81e5-cb02da29d18b"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.583495 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(d87d9e6d724650ae105ad7a58f7d231ef67e4eca475a763b852243663f001420): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.583565 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(d87d9e6d724650ae105ad7a58f7d231ef67e4eca475a763b852243663f001420): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.583593 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(d87d9e6d724650ae105ad7a58f7d231ef67e4eca475a763b852243663f001420): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.583639 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-lz5fb_openshift-operators(15088f20-542f-426d-9e0f-cfb52b660483)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-lz5fb_openshift-operators(15088f20-542f-426d-9e0f-cfb52b660483)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-lz5fb_openshift-operators_15088f20-542f-426d-9e0f-cfb52b660483_0(d87d9e6d724650ae105ad7a58f7d231ef67e4eca475a763b852243663f001420): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" podUID="15088f20-542f-426d-9e0f-cfb52b660483"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.599890 4982 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(c625b7eda43dd063417740c1f29d8123b47c14a1e064e7aac54a8ab6a7e2a3f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.599949 4982 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(c625b7eda43dd063417740c1f29d8123b47c14a1e064e7aac54a8ab6a7e2a3f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.599976 4982 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(c625b7eda43dd063417740c1f29d8123b47c14a1e064e7aac54a8ab6a7e2a3f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:25:59 crc kubenswrapper[4982]: E1205 19:25:59.600046 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators(e2c8beeb-010c-4aac-b407-981a15acaee9)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators(e2c8beeb-010c-4aac-b407-981a15acaee9)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-5mhck_openshift-operators_e2c8beeb-010c-4aac-b407-981a15acaee9_0(c625b7eda43dd063417740c1f29d8123b47c14a1e064e7aac54a8ab6a7e2a3f8): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" podUID="e2c8beeb-010c-4aac-b407-981a15acaee9"
Dec 05 19:26:00 crc kubenswrapper[4982]: I1205 19:26:00.417238 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:26:00 crc kubenswrapper[4982]: I1205 19:26:00.491133 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:26:04 crc kubenswrapper[4982]: I1205 19:26:04.389815 4982 scope.go:117] "RemoveContainer" containerID="3524cbce9c0eb2e2fc04acf3c6d9153a434e42cf73e554bfa8a53608ce416b87"
Dec 05 19:26:05 crc kubenswrapper[4982]: I1205 19:26:05.444269 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8ldph_62e7c34f-d411-481e-a5bb-885e7cbd4326/kube-multus/2.log"
Dec 05 19:26:05 crc kubenswrapper[4982]: I1205 19:26:05.444510 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8ldph" event={"ID":"62e7c34f-d411-481e-a5bb-885e7cbd4326","Type":"ContainerStarted","Data":"bd0a3786cdd03bc3a0eb7d02fe308e46de39bba26a587f43992d812fe487feac"}
Dec 05 19:26:10 crc kubenswrapper[4982]: I1205 19:26:10.389267 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:26:10 crc kubenswrapper[4982]: I1205 19:26:10.390192 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"
Dec 05 19:26:10 crc kubenswrapper[4982]: I1205 19:26:10.600524 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw"]
Dec 05 19:26:11 crc kubenswrapper[4982]: I1205 19:26:11.484544 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" event={"ID":"1624c766-4ba0-48bf-a7b7-4a8322251e2e","Type":"ContainerStarted","Data":"94e6b062fd580a3db1b5dd1ee4349f0a85f4125d87620499532393f3de473d71"}
Dec 05 19:26:12 crc kubenswrapper[4982]: I1205 19:26:12.557120 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:26:12 crc kubenswrapper[4982]: I1205 19:26:12.557474 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:26:13 crc kubenswrapper[4982]: I1205 19:26:13.391479 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:26:13 crc kubenswrapper[4982]: I1205 19:26:13.392283 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"
Dec 05 19:26:13 crc kubenswrapper[4982]: I1205 19:26:13.706866 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck"]
Dec 05 19:26:13 crc kubenswrapper[4982]: W1205 19:26:13.716781 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode2c8beeb_010c_4aac_b407_981a15acaee9.slice/crio-1a32e73d1103f4d7b3519b801dd876157c7a23d712e126ee12887b715fa8042a WatchSource:0}: Error finding container 1a32e73d1103f4d7b3519b801dd876157c7a23d712e126ee12887b715fa8042a: Status 404 returned error can't find the container with id 1a32e73d1103f4d7b3519b801dd876157c7a23d712e126ee12887b715fa8042a
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.390186 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.390240 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.390355 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.390700 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.390788 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.391016 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.506536 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" event={"ID":"e2c8beeb-010c-4aac-b407-981a15acaee9","Type":"ContainerStarted","Data":"1a32e73d1103f4d7b3519b801dd876157c7a23d712e126ee12887b715fa8042a"}
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.672031 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-nwfp4"]
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.717963 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz"]
Dec 05 19:26:14 crc kubenswrapper[4982]: W1205 19:26:14.740308 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe591891_51d5_49c8_880a_213703150e27.slice/crio-b3583073eda0e1e221f0634bd6c890fb54de4ea6dd64daf4035281b1aec6f1c2 WatchSource:0}: Error finding container b3583073eda0e1e221f0634bd6c890fb54de4ea6dd64daf4035281b1aec6f1c2: Status 404 returned error can't find the container with id b3583073eda0e1e221f0634bd6c890fb54de4ea6dd64daf4035281b1aec6f1c2
Dec 05 19:26:14 crc kubenswrapper[4982]: I1205 19:26:14.751925 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-lz5fb"]
Dec 05 19:26:14 crc kubenswrapper[4982]: W1205 19:26:14.757641 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod15088f20_542f_426d_9e0f_cfb52b660483.slice/crio-993c2d56b201b7c2df4d5c09dff0771562ce2183a8abcc8143b3492d5c125b5d WatchSource:0}: Error finding container 993c2d56b201b7c2df4d5c09dff0771562ce2183a8abcc8143b3492d5c125b5d: Status 404 returned error can't find the container with id 993c2d56b201b7c2df4d5c09dff0771562ce2183a8abcc8143b3492d5c125b5d
Dec 05 19:26:15 crc kubenswrapper[4982]: I1205 19:26:15.513309 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" event={"ID":"fe591891-51d5-49c8-880a-213703150e27","Type":"ContainerStarted","Data":"b3583073eda0e1e221f0634bd6c890fb54de4ea6dd64daf4035281b1aec6f1c2"}
Dec 05 19:26:15 crc kubenswrapper[4982]: I1205 19:26:15.515661 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" event={"ID":"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b","Type":"ContainerStarted","Data":"f1d9e8f55937d4265d1a8eb1e33c172e26c2b74b49cedb2b89d90e6b321a0513"}
Dec 05 19:26:15 crc kubenswrapper[4982]: I1205 19:26:15.517280 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" event={"ID":"15088f20-542f-426d-9e0f-cfb52b660483","Type":"ContainerStarted","Data":"993c2d56b201b7c2df4d5c09dff0771562ce2183a8abcc8143b3492d5c125b5d"}
Dec 05 19:26:19 crc kubenswrapper[4982]: I1205 19:26:19.544551 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" event={"ID":"fe591891-51d5-49c8-880a-213703150e27","Type":"ContainerStarted","Data":"52c9387fca06b27331e28c8a7d7ebfbc34075f1195ab59343b3a0be3f0bf372b"}
Dec 05 19:26:19 crc kubenswrapper[4982]: I1205 19:26:19.546936 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" event={"ID":"1624c766-4ba0-48bf-a7b7-4a8322251e2e","Type":"ContainerStarted","Data":"ca95c054784f53b0e73183759e7181979572536fe3a0e25771dbe4e7d77f9297"}
Dec 05 19:26:19 crc kubenswrapper[4982]: I1205 19:26:19.567818 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz" podStartSLOduration=17.114190787 podStartE2EDuration="21.567798941s" podCreationTimestamp="2025-12-05 19:25:58 +0000 UTC" firstStartedPulling="2025-12-05 19:26:14.742711379 +0000 UTC m=+753.624597374" lastFinishedPulling="2025-12-05 19:26:19.196319533 +0000 UTC m=+758.078205528" observedRunningTime="2025-12-05 19:26:19.562399925 +0000 UTC m=+758.444285930" watchObservedRunningTime="2025-12-05 19:26:19.567798941 +0000 UTC m=+758.449684926"
Dec 05 19:26:19 crc kubenswrapper[4982]: I1205 19:26:19.579753 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw" podStartSLOduration=12.99481794 podStartE2EDuration="21.579738282s" podCreationTimestamp="2025-12-05 19:25:58 +0000 UTC" firstStartedPulling="2025-12-05 19:26:10.612964621 +0000 UTC m=+749.494850616" lastFinishedPulling="2025-12-05 19:26:19.197884963 +0000 UTC m=+758.079770958" observedRunningTime="2025-12-05 19:26:19.579191878 +0000 UTC m=+758.461077943" watchObservedRunningTime="2025-12-05 19:26:19.579738282 +0000 UTC m=+758.461624277"
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.555476 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-f8bs9"
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.575384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" event={"ID":"15088f20-542f-426d-9e0f-cfb52b660483","Type":"ContainerStarted","Data":"fd495b25a328d205e0fd8a31f2229e9fa478e7a5e138cae92c873309368f7318"}
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.575514 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.576583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" event={"ID":"e2c8beeb-010c-4aac-b407-981a15acaee9","Type":"ContainerStarted","Data":"f2e3698d50889eb46109036a0d00a58081dd4d5efa457b6115a105643978b5f2"}
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.607802 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-5mhck" podStartSLOduration=16.822780109 podStartE2EDuration="24.607778488s" podCreationTimestamp="2025-12-05 19:25:58 +0000 UTC" firstStartedPulling="2025-12-05 19:26:13.719092018 +0000 UTC m=+752.600978013" lastFinishedPulling="2025-12-05 19:26:21.504090407 +0000 UTC m=+760.385976392" observedRunningTime="2025-12-05 19:26:22.607212334 +0000 UTC m=+761.489098369" watchObservedRunningTime="2025-12-05 19:26:22.607778488 +0000 UTC m=+761.489664493"
Dec 05 19:26:22 crc kubenswrapper[4982]: I1205 19:26:22.632033 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-lz5fb" podStartSLOduration=17.896515543 podStartE2EDuration="24.632011799s" podCreationTimestamp="2025-12-05 19:25:58 +0000 UTC" firstStartedPulling="2025-12-05 19:26:14.768818667 +0000 UTC m=+753.650704662" lastFinishedPulling="2025-12-05 19:26:21.504314923 +0000 UTC m=+760.386200918" observedRunningTime="2025-12-05 19:26:22.624276994 +0000 UTC m=+761.506162989" watchObservedRunningTime="2025-12-05 19:26:22.632011799 +0000 UTC m=+761.513897794"
Dec 05 19:26:24 crc kubenswrapper[4982]: I1205 19:26:24.593471 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" event={"ID":"abe3cbe3-f02e-4fc5-81e5-cb02da29d18b","Type":"ContainerStarted","Data":"2dd37e0a02bbe5b725bcb2af10aecff25d598c5418d14071842c2adf5aa56f83"}
Dec 05 19:26:24 crc kubenswrapper[4982]: I1205 19:26:24.593895 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:26:24 crc kubenswrapper[4982]: I1205 19:26:24.596178 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4"
Dec 05 19:26:24 crc kubenswrapper[4982]: I1205 19:26:24.617324 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-nwfp4" podStartSLOduration=17.706057051 podStartE2EDuration="26.617306842s" podCreationTimestamp="2025-12-05 19:25:58 +0000 UTC" firstStartedPulling="2025-12-05 19:26:14.716305873 +0000 UTC m=+753.598191868" lastFinishedPulling="2025-12-05 19:26:23.627555654 +0000 UTC m=+762.509441659" observedRunningTime="2025-12-05 19:26:24.612243774 +0000 UTC m=+763.494129859" watchObservedRunningTime="2025-12-05 19:26:24.617306842 +0000 UTC m=+763.499192847"
Dec 05 19:26:27 crc kubenswrapper[4982]: I1205 19:26:27.794206 4982 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 19:26:28 crc kubenswrapper[4982]: I1205 19:26:28.797991 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-lz5fb"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.820634 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.821861 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.824310 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-8ncs4"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.824445 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.824541 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.832522 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-skf6w"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.833398 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-skf6w"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.835110 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.835410 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-rb5q7"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.844785 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-skf6w"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.856106 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.856917 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.860870 4982 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-ph5kh"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.884101 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"]
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.899122 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vx6mv\" (UniqueName: \"kubernetes.io/projected/f1ca36be-fe08-4f98-be99-35f9e8265a79-kube-api-access-vx6mv\") pod \"cert-manager-5b446d88c5-skf6w\" (UID: \"f1ca36be-fe08-4f98-be99-35f9e8265a79\") " pod="cert-manager/cert-manager-5b446d88c5-skf6w"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.899211 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxs4x\" (UniqueName: \"kubernetes.io/projected/f52e413e-741e-4b30-b1d7-e687e31c16e5-kube-api-access-xxs4x\") pod \"cert-manager-webhook-5655c58dd6-wtdnn\" (UID: \"f52e413e-741e-4b30-b1d7-e687e31c16e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:33 crc kubenswrapper[4982]: I1205 19:26:33.899318 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztckc\" (UniqueName: \"kubernetes.io/projected/9825df92-fee9-4a92-b324-62162aacc6fe-kube-api-access-ztckc\") pod \"cert-manager-cainjector-7f985d654d-sxnb8\" (UID: \"9825df92-fee9-4a92-b324-62162aacc6fe\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.000310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxs4x\" (UniqueName: \"kubernetes.io/projected/f52e413e-741e-4b30-b1d7-e687e31c16e5-kube-api-access-xxs4x\") pod \"cert-manager-webhook-5655c58dd6-wtdnn\" (UID: \"f52e413e-741e-4b30-b1d7-e687e31c16e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.000404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztckc\" (UniqueName: \"kubernetes.io/projected/9825df92-fee9-4a92-b324-62162aacc6fe-kube-api-access-ztckc\") pod \"cert-manager-cainjector-7f985d654d-sxnb8\" (UID: \"9825df92-fee9-4a92-b324-62162aacc6fe\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.000496 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vx6mv\" (UniqueName: \"kubernetes.io/projected/f1ca36be-fe08-4f98-be99-35f9e8265a79-kube-api-access-vx6mv\") pod \"cert-manager-5b446d88c5-skf6w\" (UID: \"f1ca36be-fe08-4f98-be99-35f9e8265a79\") " pod="cert-manager/cert-manager-5b446d88c5-skf6w"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.024143 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztckc\" (UniqueName: \"kubernetes.io/projected/9825df92-fee9-4a92-b324-62162aacc6fe-kube-api-access-ztckc\") pod \"cert-manager-cainjector-7f985d654d-sxnb8\" (UID: \"9825df92-fee9-4a92-b324-62162aacc6fe\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.024248 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxs4x\" (UniqueName: \"kubernetes.io/projected/f52e413e-741e-4b30-b1d7-e687e31c16e5-kube-api-access-xxs4x\") pod \"cert-manager-webhook-5655c58dd6-wtdnn\" (UID: \"f52e413e-741e-4b30-b1d7-e687e31c16e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.026000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vx6mv\" (UniqueName: \"kubernetes.io/projected/f1ca36be-fe08-4f98-be99-35f9e8265a79-kube-api-access-vx6mv\") pod \"cert-manager-5b446d88c5-skf6w\" (UID: \"f1ca36be-fe08-4f98-be99-35f9e8265a79\") " pod="cert-manager/cert-manager-5b446d88c5-skf6w"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.142258 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.152339 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-skf6w"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.172519 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.548630 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-sxnb8"]
Dec 05 19:26:34 crc kubenswrapper[4982]: W1205 19:26:34.691199 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1ca36be_fe08_4f98_be99_35f9e8265a79.slice/crio-3455b9a85ab0e9673dc895edda1977071f61940c4758f1cd1680b3bc27820dd6 WatchSource:0}: Error finding container 3455b9a85ab0e9673dc895edda1977071f61940c4758f1cd1680b3bc27820dd6: Status 404 returned error can't find the container with id 3455b9a85ab0e9673dc895edda1977071f61940c4758f1cd1680b3bc27820dd6
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.695782 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-skf6w"]
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.705788 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"]
Dec 05 19:26:34 crc kubenswrapper[4982]: W1205 19:26:34.707813 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf52e413e_741e_4b30_b1d7_e687e31c16e5.slice/crio-498b344f4d2a60761c50c51c7ce847a0be1613614f0a6f1f7fc5f6c667719ec7 WatchSource:0}: Error finding container 498b344f4d2a60761c50c51c7ce847a0be1613614f0a6f1f7fc5f6c667719ec7: Status 404 returned error can't find the container with id 498b344f4d2a60761c50c51c7ce847a0be1613614f0a6f1f7fc5f6c667719ec7
Dec 05 19:26:34 crc kubenswrapper[4982]: I1205 19:26:34.707919 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8" event={"ID":"9825df92-fee9-4a92-b324-62162aacc6fe","Type":"ContainerStarted","Data":"c2cfbc1c7177476f5fb22887f882c2372573e189aeb61e81867d8911c54911dc"}
Dec 05 19:26:35 crc kubenswrapper[4982]: I1205 19:26:35.713963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn" event={"ID":"f52e413e-741e-4b30-b1d7-e687e31c16e5","Type":"ContainerStarted","Data":"498b344f4d2a60761c50c51c7ce847a0be1613614f0a6f1f7fc5f6c667719ec7"}
Dec 05 19:26:35 crc kubenswrapper[4982]: I1205 19:26:35.714945 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-skf6w" event={"ID":"f1ca36be-fe08-4f98-be99-35f9e8265a79","Type":"ContainerStarted","Data":"3455b9a85ab0e9673dc895edda1977071f61940c4758f1cd1680b3bc27820dd6"}
Dec 05 19:26:36 crc kubenswrapper[4982]: I1205 19:26:36.720829 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8" event={"ID":"9825df92-fee9-4a92-b324-62162aacc6fe","Type":"ContainerStarted","Data":"977e7d2d22889575d49ed45959e6bf4adfed278efdb86559a0ad920a786818ef"}
Dec 05 19:26:36 crc kubenswrapper[4982]: I1205 19:26:36.761300 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-sxnb8" podStartSLOduration=1.84948229 podStartE2EDuration="3.761281729s" podCreationTimestamp="2025-12-05 19:26:33 +0000 UTC" firstStartedPulling="2025-12-05 19:26:34.556481352 +0000 UTC m=+773.438367347" lastFinishedPulling="2025-12-05 19:26:36.468280781 +0000 UTC m=+775.350166786" observedRunningTime="2025-12-05 19:26:36.759894804 +0000 UTC m=+775.641780819" watchObservedRunningTime="2025-12-05 19:26:36.761281729 +0000 UTC m=+775.643167734"
Dec 05 19:26:38 crc kubenswrapper[4982]: I1205 19:26:38.732419 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-skf6w" event={"ID":"f1ca36be-fe08-4f98-be99-35f9e8265a79","Type":"ContainerStarted","Data":"805a306a86bb9ad17fe762debd37dc1eca21ff4bdc5a3f6e88006d722c0d3bb4"}
Dec 05 19:26:38 crc kubenswrapper[4982]: I1205 19:26:38.733938 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn" event={"ID":"f52e413e-741e-4b30-b1d7-e687e31c16e5","Type":"ContainerStarted","Data":"fb71a8fdf4ba99dc22a4eb05cc1c99b663ec8f391e9cb27be5b1e219fedc00d7"}
Dec 05 19:26:38 crc kubenswrapper[4982]: I1205 19:26:38.734408 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:38 crc kubenswrapper[4982]: I1205 19:26:38.749390 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-skf6w" podStartSLOduration=2.405575533 podStartE2EDuration="5.749371072s" podCreationTimestamp="2025-12-05 19:26:33 +0000 UTC" firstStartedPulling="2025-12-05 19:26:34.701830097 +0000 UTC m=+773.583716092" lastFinishedPulling="2025-12-05 19:26:38.045625636 +0000 UTC m=+776.927511631" observedRunningTime="2025-12-05 19:26:38.748402488 +0000 UTC m=+777.630288483" watchObservedRunningTime="2025-12-05 19:26:38.749371072 +0000 UTC m=+777.631257087"
Dec 05 19:26:42 crc kubenswrapper[4982]: I1205 19:26:42.557302 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:26:42 crc kubenswrapper[4982]: I1205 19:26:42.557571 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:26:44 crc kubenswrapper[4982]: I1205 19:26:44.180521 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn"
Dec 05 19:26:44 crc kubenswrapper[4982]: I1205 19:26:44.207412 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-wtdnn" podStartSLOduration=7.913168096 podStartE2EDuration="11.207330163s" podCreationTimestamp="2025-12-05 19:26:33 +0000 UTC" firstStartedPulling="2025-12-05 19:26:34.709537252 +0000 UTC m=+773.591423247" lastFinishedPulling="2025-12-05 19:26:38.003699319 +0000 UTC m=+776.885585314" observedRunningTime="2025-12-05 19:26:38.768538166 +0000 UTC m=+777.650424161" watchObservedRunningTime="2025-12-05 19:26:44.207330163 +0000 UTC m=+783.089216208"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.708878 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"]
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.710368 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.711958 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.725258 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"]
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.806903 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.806953 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pjhl\" (UniqueName: \"kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.806970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.907682 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pjhl\" (UniqueName: \"kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.907722 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.907787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.908274 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.908299 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:09 crc kubenswrapper[4982]: I1205 19:27:09.925043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pjhl\" (UniqueName: \"kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:10 crc kubenswrapper[4982]: I1205 19:27:10.026886 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:10 crc kubenswrapper[4982]: I1205 19:27:10.241544 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"]
Dec 05 19:27:10 crc kubenswrapper[4982]: I1205 19:27:10.951981 4982 generic.go:334] "Generic (PLEG): container finished" podID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerID="3fd4fa4cfd812c2abfa8904915c202dd3a359b256c397cc8dc919fa060152189" exitCode=0
Dec 05 19:27:10 crc kubenswrapper[4982]: I1205 19:27:10.952072 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" event={"ID":"778aa6ce-5b87-4f63-b2dc-72daba528154","Type":"ContainerDied","Data":"3fd4fa4cfd812c2abfa8904915c202dd3a359b256c397cc8dc919fa060152189"}
Dec 05 19:27:10 crc kubenswrapper[4982]: I1205 19:27:10.952287 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" event={"ID":"778aa6ce-5b87-4f63-b2dc-72daba528154","Type":"ContainerStarted","Data":"2eec044c0447c3cfe81fa2579031340fbbf1970d1e9aab60e6f6529db905aa12"}
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.795247 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"]
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.796538 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.811394 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"]
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.905328 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"]
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.906136 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.908324 4982 reflector.go:368] Caches populated for *v1.Secret from object-"minio-dev"/"default-dockercfg-f9kpp"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.908555 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.908699 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.910007 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.958198 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.959447 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd8sl\" (UniqueName: \"kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:11 crc kubenswrapper[4982]: I1205 19:27:11.959583 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061070 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sfrch\" (UniqueName: \"kubernetes.io/projected/9ce0bd90-82ff-4539-885c-300e00889f28-kube-api-access-sfrch\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061203 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061253 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-bd824d2d-d64e-4374-a012-2211018eaea4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd824d2d-d64e-4374-a012-2211018eaea4\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd8sl\" (UniqueName: \"kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061314 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061783 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.061980 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.082929 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd8sl\" (UniqueName: \"kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl\") pod \"redhat-operators-9lc7b\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.163234 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sfrch\" (UniqueName: \"kubernetes.io/projected/9ce0bd90-82ff-4539-885c-300e00889f28-kube-api-access-sfrch\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.163330 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-bd824d2d-d64e-4374-a012-2211018eaea4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd824d2d-d64e-4374-a012-2211018eaea4\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.167517 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.167545 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-bd824d2d-d64e-4374-a012-2211018eaea4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd824d2d-d64e-4374-a012-2211018eaea4\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/356f0f899d250980cb8025664b6814e59141eb7f54b88b73b9438082327ef43e/globalmount\"" pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.175458 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lc7b"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.185647 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sfrch\" (UniqueName: \"kubernetes.io/projected/9ce0bd90-82ff-4539-885c-300e00889f28-kube-api-access-sfrch\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.195889 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-bd824d2d-d64e-4374-a012-2211018eaea4\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-bd824d2d-d64e-4374-a012-2211018eaea4\") pod \"minio\" (UID: \"9ce0bd90-82ff-4539-885c-300e00889f28\") " pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.230629 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.436785 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"]
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.463661 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.557454 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.557524 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.557577 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.558161 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.558215 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13" gracePeriod=600
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.963953 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerStarted","Data":"a9d957726b935b1e1b814667bcb98a7f57f6350fa4a654028266f067f27cdf02"}
Dec 05 19:27:12 crc kubenswrapper[4982]: I1205 19:27:12.965835 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"9ce0bd90-82ff-4539-885c-300e00889f28","Type":"ContainerStarted","Data":"e375e8b94357e355a8964045100d2d61160c98f93480dda4a3d5fe669f5d197f"}
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.974215 4982 generic.go:334] "Generic (PLEG): container finished" podID="37909681-9158-4672-88f3-72223bb1704f" containerID="09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566" exitCode=0
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.975107 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerDied","Data":"09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566"}
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.984227 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13" exitCode=0
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.984302 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13"}
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.984330 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e"}
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.984347 4982 scope.go:117] "RemoveContainer" containerID="92566dcb46ab9bedfb8463976d7e833b3131cb3485bc9bee7d010a0908d75c66"
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.987026 4982 generic.go:334] "Generic (PLEG): container finished" podID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerID="7927e9801d6647c3ce6604e248591bcef896f25f33892947684b848bd097f88e" exitCode=0
Dec 05 19:27:13 crc kubenswrapper[4982]: I1205 19:27:13.987091 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" event={"ID":"778aa6ce-5b87-4f63-b2dc-72daba528154","Type":"ContainerDied","Data":"7927e9801d6647c3ce6604e248591bcef896f25f33892947684b848bd097f88e"}
Dec 05 19:27:15 crc kubenswrapper[4982]: I1205 19:27:15.000667 4982 generic.go:334] "Generic (PLEG): container finished" podID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerID="73579aefc47b95efc683909d49572257f6a410054fbe2627f2c1012048960c7c" exitCode=0
Dec 05 19:27:15 crc kubenswrapper[4982]: I1205 19:27:15.000762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" event={"ID":"778aa6ce-5b87-4f63-b2dc-72daba528154","Type":"ContainerDied","Data":"73579aefc47b95efc683909d49572257f6a410054fbe2627f2c1012048960c7c"}
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.286957 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj"
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.427312 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle\") pod \"778aa6ce-5b87-4f63-b2dc-72daba528154\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") "
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.427971 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7pjhl\" (UniqueName: \"kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl\") pod \"778aa6ce-5b87-4f63-b2dc-72daba528154\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") "
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.428054 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util\") pod \"778aa6ce-5b87-4f63-b2dc-72daba528154\" (UID: \"778aa6ce-5b87-4f63-b2dc-72daba528154\") "
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.437072 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl" (OuterVolumeSpecName: "kube-api-access-7pjhl") pod "778aa6ce-5b87-4f63-b2dc-72daba528154" (UID: "778aa6ce-5b87-4f63-b2dc-72daba528154"). InnerVolumeSpecName "kube-api-access-7pjhl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.439065 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util" (OuterVolumeSpecName: "util") pod "778aa6ce-5b87-4f63-b2dc-72daba528154" (UID: "778aa6ce-5b87-4f63-b2dc-72daba528154"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.440072 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle" (OuterVolumeSpecName: "bundle") pod "778aa6ce-5b87-4f63-b2dc-72daba528154" (UID: "778aa6ce-5b87-4f63-b2dc-72daba528154"). InnerVolumeSpecName "bundle".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.528988 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7pjhl\" (UniqueName: \"kubernetes.io/projected/778aa6ce-5b87-4f63-b2dc-72daba528154-kube-api-access-7pjhl\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.529205 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-util\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:16 crc kubenswrapper[4982]: I1205 19:27:16.529302 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/778aa6ce-5b87-4f63-b2dc-72daba528154-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.014294 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" event={"ID":"778aa6ce-5b87-4f63-b2dc-72daba528154","Type":"ContainerDied","Data":"2eec044c0447c3cfe81fa2579031340fbbf1970d1e9aab60e6f6529db905aa12"} Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.014587 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2eec044c0447c3cfe81fa2579031340fbbf1970d1e9aab60e6f6529db905aa12" Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.014317 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj" Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.016284 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"9ce0bd90-82ff-4539-885c-300e00889f28","Type":"ContainerStarted","Data":"700d7ffd93fa3c597331057eb68ff358580f50dc00d291147149fa278c94d978"} Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.032474 4982 generic.go:334] "Generic (PLEG): container finished" podID="37909681-9158-4672-88f3-72223bb1704f" containerID="365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c" exitCode=0 Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.032628 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerDied","Data":"365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c"} Dec 05 19:27:17 crc kubenswrapper[4982]: I1205 19:27:17.047795 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.586021907 podStartE2EDuration="8.04777358s" podCreationTimestamp="2025-12-05 19:27:09 +0000 UTC" firstStartedPulling="2025-12-05 19:27:12.477722069 +0000 UTC m=+811.359608084" lastFinishedPulling="2025-12-05 19:27:15.939473722 +0000 UTC m=+814.821359757" observedRunningTime="2025-12-05 19:27:17.040302131 +0000 UTC m=+815.922188126" watchObservedRunningTime="2025-12-05 19:27:17.04777358 +0000 UTC m=+815.929659585" Dec 05 19:27:18 crc kubenswrapper[4982]: I1205 19:27:18.041257 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerStarted","Data":"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0"} Dec 05 19:27:18 crc kubenswrapper[4982]: I1205 19:27:18.057831 4982 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-9lc7b" podStartSLOduration=3.840979173 podStartE2EDuration="7.05781572s" podCreationTimestamp="2025-12-05 19:27:11 +0000 UTC" firstStartedPulling="2025-12-05 19:27:14.227076652 +0000 UTC m=+813.108962647" lastFinishedPulling="2025-12-05 19:27:17.443913159 +0000 UTC m=+816.325799194" observedRunningTime="2025-12-05 19:27:18.054541637 +0000 UTC m=+816.936427642" watchObservedRunningTime="2025-12-05 19:27:18.05781572 +0000 UTC m=+816.939701705" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.176055 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.176731 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.308871 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv"] Dec 05 19:27:22 crc kubenswrapper[4982]: E1205 19:27:22.309089 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="pull" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.309105 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="pull" Dec 05 19:27:22 crc kubenswrapper[4982]: E1205 19:27:22.309114 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="extract" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.309121 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="extract" Dec 05 19:27:22 crc kubenswrapper[4982]: E1205 19:27:22.309129 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="util" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.309135 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="util" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.309256 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="778aa6ce-5b87-4f63-b2dc-72daba528154" containerName="extract" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.309824 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.311871 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.312258 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.315355 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.315691 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.315778 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-t4d84" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.315866 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.337589 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv"] Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.412458 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-apiservice-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.412551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/362ad9c9-a652-4965-9d36-10c0332bff02-manager-config\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.412597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-webhook-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.412630 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd7wn\" (UniqueName: \"kubernetes.io/projected/362ad9c9-a652-4965-9d36-10c0332bff02-kube-api-access-wd7wn\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.412657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.513607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-apiservice-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.513680 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/362ad9c9-a652-4965-9d36-10c0332bff02-manager-config\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.513739 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-webhook-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.513765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd7wn\" (UniqueName: \"kubernetes.io/projected/362ad9c9-a652-4965-9d36-10c0332bff02-kube-api-access-wd7wn\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.513794 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.693207 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/362ad9c9-a652-4965-9d36-10c0332bff02-manager-config\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.697623 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-apiservice-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.698356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-webhook-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.698677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd7wn\" (UniqueName: \"kubernetes.io/projected/362ad9c9-a652-4965-9d36-10c0332bff02-kube-api-access-wd7wn\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.703678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/362ad9c9-a652-4965-9d36-10c0332bff02-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-684f549574-q5qzv\" (UID: \"362ad9c9-a652-4965-9d36-10c0332bff02\") " pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:22 crc kubenswrapper[4982]: I1205 19:27:22.926305 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:23 crc kubenswrapper[4982]: I1205 19:27:23.185521 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv"] Dec 05 19:27:23 crc kubenswrapper[4982]: I1205 19:27:23.224984 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9lc7b" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="registry-server" probeResult="failure" output=< Dec 05 19:27:23 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:27:23 crc kubenswrapper[4982]: > Dec 05 19:27:24 crc kubenswrapper[4982]: I1205 19:27:24.070822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" event={"ID":"362ad9c9-a652-4965-9d36-10c0332bff02","Type":"ContainerStarted","Data":"ea7df56f063db37bf44a1a8f3930cfd2cf705fad3089a655502e12edb846d09d"} Dec 05 19:27:29 crc kubenswrapper[4982]: I1205 19:27:29.099602 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" event={"ID":"362ad9c9-a652-4965-9d36-10c0332bff02","Type":"ContainerStarted","Data":"edaa81b5ceab18b307176fec40c03fbe2a4171814a19600409b64e895ed62e18"} Dec 05 19:27:32 crc kubenswrapper[4982]: I1205 19:27:32.227574 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:32 crc kubenswrapper[4982]: I1205 19:27:32.273763 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:34 crc kubenswrapper[4982]: I1205 19:27:34.582999 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"] Dec 05 19:27:34 crc kubenswrapper[4982]: I1205 19:27:34.584141 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9lc7b" podUID="37909681-9158-4672-88f3-72223bb1704f" 
containerName="registry-server" containerID="cri-o://b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0" gracePeriod=2 Dec 05 19:27:34 crc kubenswrapper[4982]: I1205 19:27:34.903761 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.043338 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities\") pod \"37909681-9158-4672-88f3-72223bb1704f\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.043437 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content\") pod \"37909681-9158-4672-88f3-72223bb1704f\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.043458 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hd8sl\" (UniqueName: \"kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl\") pod \"37909681-9158-4672-88f3-72223bb1704f\" (UID: \"37909681-9158-4672-88f3-72223bb1704f\") " Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.044224 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities" (OuterVolumeSpecName: "utilities") pod "37909681-9158-4672-88f3-72223bb1704f" (UID: "37909681-9158-4672-88f3-72223bb1704f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.054258 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl" (OuterVolumeSpecName: "kube-api-access-hd8sl") pod "37909681-9158-4672-88f3-72223bb1704f" (UID: "37909681-9158-4672-88f3-72223bb1704f"). InnerVolumeSpecName "kube-api-access-hd8sl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.144697 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.144729 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hd8sl\" (UniqueName: \"kubernetes.io/projected/37909681-9158-4672-88f3-72223bb1704f-kube-api-access-hd8sl\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.152791 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" event={"ID":"362ad9c9-a652-4965-9d36-10c0332bff02","Type":"ContainerStarted","Data":"5c4a24309dcfe8190f1af4bf4a6ec591c47f03080972375d4630bfebb869d85d"} Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.153591 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.156901 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.157649 4982 generic.go:334] "Generic (PLEG): container finished" podID="37909681-9158-4672-88f3-72223bb1704f" containerID="b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0" exitCode=0 Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.157685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerDied","Data":"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0"} Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.157709 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9lc7b" event={"ID":"37909681-9158-4672-88f3-72223bb1704f","Type":"ContainerDied","Data":"a9d957726b935b1e1b814667bcb98a7f57f6350fa4a654028266f067f27cdf02"} Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.157732 4982 scope.go:117] "RemoveContainer" containerID="b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.157748 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9lc7b" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.175835 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-684f549574-q5qzv" podStartSLOduration=1.904078041 podStartE2EDuration="13.175816754s" podCreationTimestamp="2025-12-05 19:27:22 +0000 UTC" firstStartedPulling="2025-12-05 19:27:23.194371505 +0000 UTC m=+822.076257500" lastFinishedPulling="2025-12-05 19:27:34.466110218 +0000 UTC m=+833.347996213" observedRunningTime="2025-12-05 19:27:35.173711371 +0000 UTC m=+834.055597406" watchObservedRunningTime="2025-12-05 19:27:35.175816754 +0000 UTC m=+834.057702759" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.185403 4982 scope.go:117] "RemoveContainer" containerID="365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.199327 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37909681-9158-4672-88f3-72223bb1704f" (UID: "37909681-9158-4672-88f3-72223bb1704f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.208685 4982 scope.go:117] "RemoveContainer" containerID="09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.243004 4982 scope.go:117] "RemoveContainer" containerID="b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0" Dec 05 19:27:35 crc kubenswrapper[4982]: E1205 19:27:35.243451 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0\": container with ID starting with b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0 not found: ID does not exist" containerID="b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.243505 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0"} err="failed to get container status \"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0\": rpc error: code = NotFound desc = could not find container \"b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0\": container with ID starting with b281735f18c1f7e439b47d9d2a2cecb9ea28866ed3887495ea891f2d4d69efa0 not found: ID does not exist" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.243541 4982 scope.go:117] "RemoveContainer" containerID="365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c" Dec 05 19:27:35 crc kubenswrapper[4982]: E1205 19:27:35.244108 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c\": container with ID starting with 365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c not found: ID does not exist" containerID="365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.244555 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c"} err="failed to get container status \"365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c\": rpc error: code = NotFound desc = could not find container \"365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c\": container with ID starting with 365f35b9bb41ec06ea6d0077ebb6b28c42b7f9ce11d4400438423cd4812fee2c not found: ID does not exist" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.244679 4982 scope.go:117] "RemoveContainer" containerID="09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566" Dec 05 19:27:35 crc kubenswrapper[4982]: E1205 19:27:35.245185 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566\": container with ID starting with 09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566 not found: ID does not exist" containerID="09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.245309 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566"} err="failed to get container status \"09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566\": rpc error: code = NotFound desc = could not find container \"09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566\": container with ID starting with 09c6476ff05e5e88d8ed6b875295dec0496d9ba65ed14449d9ead37f2fc02566 not found: ID does not exist" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.245669 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37909681-9158-4672-88f3-72223bb1704f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.484049 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"] Dec 05 19:27:35 crc kubenswrapper[4982]: I1205 19:27:35.489826 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9lc7b"] Dec 05 19:27:37 crc kubenswrapper[4982]: I1205 19:27:37.396715 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37909681-9158-4672-88f3-72223bb1704f" path="/var/lib/kubelet/pods/37909681-9158-4672-88f3-72223bb1704f/volumes" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.255240 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7"] Dec 05 19:28:08 crc kubenswrapper[4982]: E1205 19:28:08.255887 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="registry-server" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.255898 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="registry-server" Dec 05 19:28:08 crc kubenswrapper[4982]: E1205 19:28:08.255913 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="extract-content" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.255919 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="37909681-9158-4672-88f3-72223bb1704f" 
containerName="extract-content" Dec 05 19:28:08 crc kubenswrapper[4982]: E1205 19:28:08.255935 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="extract-utilities" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.255942 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="extract-utilities" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.256027 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="37909681-9158-4672-88f3-72223bb1704f" containerName="registry-server" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.256753 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.262434 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.270004 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7"] Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.311461 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.311511 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.311541 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tnvc\" (UniqueName: \"kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.413549 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.413632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tnvc\" (UniqueName: \"kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.413727 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.414182 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.414349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.439484 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tnvc\" (UniqueName: \"kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:08 crc kubenswrapper[4982]: I1205 19:28:08.571990 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:09 crc kubenswrapper[4982]: I1205 19:28:09.066285 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7"] Dec 05 19:28:09 crc kubenswrapper[4982]: I1205 19:28:09.387632 4982 generic.go:334] "Generic (PLEG): container finished" podID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerID="9d1d837ea56f59112c5e4eca74923589c1073c02ead5734f82a4ee2de0f8b590" exitCode=0 Dec 05 19:28:09 crc kubenswrapper[4982]: I1205 19:28:09.387729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerDied","Data":"9d1d837ea56f59112c5e4eca74923589c1073c02ead5734f82a4ee2de0f8b590"} Dec 05 19:28:09 crc kubenswrapper[4982]: I1205 19:28:09.387963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerStarted","Data":"40c139734dc3b9e28befc119abeb290b9e41de1891c1d81d11187c70f222cd7a"} Dec 05 19:28:10 crc kubenswrapper[4982]: I1205 19:28:10.395255 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerStarted","Data":"468f06608a26ac2c1c1c7fd5faeee5a629f6d747ed984e67b5500e938e828ece"} Dec 05 19:28:11 crc kubenswrapper[4982]: I1205 19:28:11.405042 4982 generic.go:334] "Generic (PLEG): container finished" podID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerID="468f06608a26ac2c1c1c7fd5faeee5a629f6d747ed984e67b5500e938e828ece" exitCode=0 Dec 05 19:28:11 crc kubenswrapper[4982]: I1205 19:28:11.405119 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerDied","Data":"468f06608a26ac2c1c1c7fd5faeee5a629f6d747ed984e67b5500e938e828ece"} Dec 05 19:28:12 crc kubenswrapper[4982]: I1205 19:28:12.413023 4982 generic.go:334] "Generic (PLEG): container finished" podID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerID="f36d96e1c8bd8db388f742982a14fcbb18d4ee7bc1c7032abacb30ab878b24e6" exitCode=0 Dec 05 19:28:12 crc kubenswrapper[4982]: I1205 19:28:12.413070 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerDied","Data":"f36d96e1c8bd8db388f742982a14fcbb18d4ee7bc1c7032abacb30ab878b24e6"} Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.682089 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.804892 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle\") pod \"4ef522e3-7448-4261-b647-d5bb5a547dc7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.804963 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tnvc\" (UniqueName: \"kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc\") pod \"4ef522e3-7448-4261-b647-d5bb5a547dc7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.805018 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util\") pod \"4ef522e3-7448-4261-b647-d5bb5a547dc7\" (UID: \"4ef522e3-7448-4261-b647-d5bb5a547dc7\") " Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.805444 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle" (OuterVolumeSpecName: "bundle") pod "4ef522e3-7448-4261-b647-d5bb5a547dc7" (UID: "4ef522e3-7448-4261-b647-d5bb5a547dc7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.813658 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc" (OuterVolumeSpecName: "kube-api-access-4tnvc") pod "4ef522e3-7448-4261-b647-d5bb5a547dc7" (UID: "4ef522e3-7448-4261-b647-d5bb5a547dc7"). InnerVolumeSpecName "kube-api-access-4tnvc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.821285 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util" (OuterVolumeSpecName: "util") pod "4ef522e3-7448-4261-b647-d5bb5a547dc7" (UID: "4ef522e3-7448-4261-b647-d5bb5a547dc7"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.907017 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.907123 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tnvc\" (UniqueName: \"kubernetes.io/projected/4ef522e3-7448-4261-b647-d5bb5a547dc7-kube-api-access-4tnvc\") on node \"crc\" DevicePath \"\"" Dec 05 19:28:13 crc kubenswrapper[4982]: I1205 19:28:13.907184 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ef522e3-7448-4261-b647-d5bb5a547dc7-util\") on node \"crc\" DevicePath \"\"" Dec 05 19:28:14 crc kubenswrapper[4982]: I1205 19:28:14.429395 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" event={"ID":"4ef522e3-7448-4261-b647-d5bb5a547dc7","Type":"ContainerDied","Data":"40c139734dc3b9e28befc119abeb290b9e41de1891c1d81d11187c70f222cd7a"} Dec 05 19:28:14 crc kubenswrapper[4982]: I1205 19:28:14.429435 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="40c139734dc3b9e28befc119abeb290b9e41de1891c1d81d11187c70f222cd7a" Dec 05 19:28:14 crc kubenswrapper[4982]: I1205 19:28:14.429500 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.105268 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp"] Dec 05 19:28:20 crc kubenswrapper[4982]: E1205 19:28:20.105782 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="pull" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.105795 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="pull" Dec 05 19:28:20 crc kubenswrapper[4982]: E1205 19:28:20.105813 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="util" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.105818 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="util" Dec 05 19:28:20 crc kubenswrapper[4982]: E1205 19:28:20.105830 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="extract" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.105836 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="extract" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.105925 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ef522e3-7448-4261-b647-d5bb5a547dc7" containerName="extract" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.106361 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.108560 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.109137 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.109396 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-bx776" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.117399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp"] Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.290130 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djd5m\" (UniqueName: \"kubernetes.io/projected/0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d-kube-api-access-djd5m\") pod \"nmstate-operator-5b5b58f5c8-s9fkp\" (UID: \"0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.390915 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djd5m\" (UniqueName: \"kubernetes.io/projected/0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d-kube-api-access-djd5m\") pod \"nmstate-operator-5b5b58f5c8-s9fkp\" (UID: \"0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.411000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djd5m\" (UniqueName: \"kubernetes.io/projected/0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d-kube-api-access-djd5m\") pod \"nmstate-operator-5b5b58f5c8-s9fkp\" (UID: \"0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.425550 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" Dec 05 19:28:20 crc kubenswrapper[4982]: I1205 19:28:20.630424 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp"] Dec 05 19:28:21 crc kubenswrapper[4982]: I1205 19:28:21.478122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" event={"ID":"0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d","Type":"ContainerStarted","Data":"384c0f54746f993675fead55476835dda4ce6ebb34f14863671b28c52395b2d6"} Dec 05 19:28:23 crc kubenswrapper[4982]: I1205 19:28:23.489315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" event={"ID":"0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d","Type":"ContainerStarted","Data":"59dbdfb53f017dfb3f21b1a54b5de485e871f81a03d9e808858e2bf059163256"} Dec 05 19:28:23 crc kubenswrapper[4982]: I1205 19:28:23.505142 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-s9fkp" podStartSLOduration=1.619325394 podStartE2EDuration="3.505123351s" podCreationTimestamp="2025-12-05 19:28:20 +0000 UTC" firstStartedPulling="2025-12-05 19:28:20.635104484 +0000 UTC m=+879.516990469" lastFinishedPulling="2025-12-05 19:28:22.520902431 +0000 UTC m=+881.402788426" observedRunningTime="2025-12-05 19:28:23.502939096 +0000 UTC m=+882.384825101" watchObservedRunningTime="2025-12-05 19:28:23.505123351 +0000 UTC m=+882.387009356" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.518233 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.520136 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.526626 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-t7gtr" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.540571 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.541324 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.546935 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.562496 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-4dnv7"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.563357 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.566700 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.580353 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.661329 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.662014 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.663580 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.663737 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-cgjvs" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.663881 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.672407 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718253 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-ovs-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718295 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5twvv\" (UniqueName: \"kubernetes.io/projected/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-kube-api-access-5twvv\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718329 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxhj6\" (UniqueName: \"kubernetes.io/projected/d69403b9-dbd1-4059-afca-e7a907dd2c08-kube-api-access-hxhj6\") pod \"nmstate-metrics-7f946cbc9-l8vt9\" (UID: \"d69403b9-dbd1-4059-afca-e7a907dd2c08\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718454 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-nmstate-lock\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718548 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/085cb474-3881-41ed-b0fa-6a3d237ec343-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " 
pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718579 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpb8j\" (UniqueName: \"kubernetes.io/projected/085cb474-3881-41ed-b0fa-6a3d237ec343-kube-api-access-mpb8j\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.718806 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-dbus-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxhj6\" (UniqueName: \"kubernetes.io/projected/d69403b9-dbd1-4059-afca-e7a907dd2c08-kube-api-access-hxhj6\") pod \"nmstate-metrics-7f946cbc9-l8vt9\" (UID: \"d69403b9-dbd1-4059-afca-e7a907dd2c08\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821813 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxf4g\" (UniqueName: \"kubernetes.io/projected/47097c85-dd76-46fd-b837-c5f9e2f5b599-kube-api-access-lxf4g\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821849 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-nmstate-lock\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821889 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/085cb474-3881-41ed-b0fa-6a3d237ec343-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821912 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpb8j\" (UniqueName: \"kubernetes.io/projected/085cb474-3881-41ed-b0fa-6a3d237ec343-kube-api-access-mpb8j\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821964 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/47097c85-dd76-46fd-b837-c5f9e2f5b599-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821980 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: 
\"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-nmstate-lock\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.821995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-dbus-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.822128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-ovs-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.822191 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5twvv\" (UniqueName: \"kubernetes.io/projected/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-kube-api-access-5twvv\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.822233 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/47097c85-dd76-46fd-b837-c5f9e2f5b599-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.822306 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-dbus-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.822315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-ovs-socket\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.841294 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/085cb474-3881-41ed-b0fa-6a3d237ec343-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.848376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5twvv\" (UniqueName: \"kubernetes.io/projected/38b59b38-39e3-4a09-b50e-0cfa9035cd3f-kube-api-access-5twvv\") pod \"nmstate-handler-4dnv7\" (UID: \"38b59b38-39e3-4a09-b50e-0cfa9035cd3f\") " pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.853691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpb8j\" (UniqueName: 
\"kubernetes.io/projected/085cb474-3881-41ed-b0fa-6a3d237ec343-kube-api-access-mpb8j\") pod \"nmstate-webhook-5f6d4c5ccb-lrtmh\" (UID: \"085cb474-3881-41ed-b0fa-6a3d237ec343\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.857057 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-8f4dcd9df-hs694"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.857930 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.862650 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.868050 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxhj6\" (UniqueName: \"kubernetes.io/projected/d69403b9-dbd1-4059-afca-e7a907dd2c08-kube-api-access-hxhj6\") pod \"nmstate-metrics-7f946cbc9-l8vt9\" (UID: \"d69403b9-dbd1-4059-afca-e7a907dd2c08\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.880884 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8f4dcd9df-hs694"] Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.884570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923252 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923298 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-service-ca\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923357 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/47097c85-dd76-46fd-b837-c5f9e2f5b599-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-oauth-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923456 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " 
pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/47097c85-dd76-46fd-b837-c5f9e2f5b599-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923511 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-trusted-ca-bundle\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923544 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zwxj\" (UniqueName: \"kubernetes.io/projected/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-kube-api-access-4zwxj\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxf4g\" (UniqueName: \"kubernetes.io/projected/47097c85-dd76-46fd-b837-c5f9e2f5b599-kube-api-access-lxf4g\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.923605 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-oauth-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.924663 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/47097c85-dd76-46fd-b837-c5f9e2f5b599-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.929869 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/47097c85-dd76-46fd-b837-c5f9e2f5b599-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.941112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxf4g\" (UniqueName: \"kubernetes.io/projected/47097c85-dd76-46fd-b837-c5f9e2f5b599-kube-api-access-lxf4g\") pod \"nmstate-console-plugin-7fbb5f6569-2zqs2\" (UID: \"47097c85-dd76-46fd-b837-c5f9e2f5b599\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:29 crc kubenswrapper[4982]: I1205 19:28:29.976500 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.025070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-oauth-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.025139 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.025209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-trusted-ca-bundle\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.026357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-oauth-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.026383 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.026473 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zwxj\" (UniqueName: \"kubernetes.io/projected/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-kube-api-access-4zwxj\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.026525 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-oauth-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.027277 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-trusted-ca-bundle\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.026556 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: 
\"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.027420 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-service-ca\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.028268 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-service-ca\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.038181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-oauth-config\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.038490 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-console-serving-cert\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.044191 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zwxj\" (UniqueName: \"kubernetes.io/projected/9d8c7eee-debd-49b1-94f3-4b545d83b9fb-kube-api-access-4zwxj\") pod \"console-8f4dcd9df-hs694\" (UID: \"9d8c7eee-debd-49b1-94f3-4b545d83b9fb\") " pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.149575 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.186442 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2"] Dec 05 19:28:30 crc kubenswrapper[4982]: W1205 19:28:30.190598 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47097c85_dd76_46fd_b837_c5f9e2f5b599.slice/crio-4ca15c908e517a7f75ea23651621c3849ad2f806ead21bd049e0e4f920eeae7a WatchSource:0}: Error finding container 4ca15c908e517a7f75ea23651621c3849ad2f806ead21bd049e0e4f920eeae7a: Status 404 returned error can't find the container with id 4ca15c908e517a7f75ea23651621c3849ad2f806ead21bd049e0e4f920eeae7a Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.246482 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.314547 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh"] Dec 05 19:28:30 crc kubenswrapper[4982]: W1205 19:28:30.334298 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod085cb474_3881_41ed_b0fa_6a3d237ec343.slice/crio-d5bf6efc456df580a2e75849d3f47f000d59e9ef2b1d6608e339d605d5944e37 WatchSource:0}: Error finding container d5bf6efc456df580a2e75849d3f47f000d59e9ef2b1d6608e339d605d5944e37: Status 404 returned error can't find the container with id d5bf6efc456df580a2e75849d3f47f000d59e9ef2b1d6608e339d605d5944e37 Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.349649 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9"] Dec 05 19:28:30 crc kubenswrapper[4982]: W1205 19:28:30.368276 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd69403b9_dbd1_4059_afca_e7a907dd2c08.slice/crio-ac443f66fe0c88f0b4385276cf926ede31dc3b6f24e327d242ec9fe1ce28113d WatchSource:0}: Error finding container ac443f66fe0c88f0b4385276cf926ede31dc3b6f24e327d242ec9fe1ce28113d: Status 404 returned error can't find the container with id ac443f66fe0c88f0b4385276cf926ede31dc3b6f24e327d242ec9fe1ce28113d Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.452413 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-8f4dcd9df-hs694"] Dec 05 19:28:30 crc kubenswrapper[4982]: W1205 19:28:30.455651 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d8c7eee_debd_49b1_94f3_4b545d83b9fb.slice/crio-a5b6b2732e5050739925fbc0ab98df0004670ca1a745cf6f0e6dacae55b8fbb6 WatchSource:0}: Error finding container a5b6b2732e5050739925fbc0ab98df0004670ca1a745cf6f0e6dacae55b8fbb6: Status 404 returned error can't find the container with id a5b6b2732e5050739925fbc0ab98df0004670ca1a745cf6f0e6dacae55b8fbb6 Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.534429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" event={"ID":"47097c85-dd76-46fd-b837-c5f9e2f5b599","Type":"ContainerStarted","Data":"4ca15c908e517a7f75ea23651621c3849ad2f806ead21bd049e0e4f920eeae7a"} Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.535507 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4dnv7" event={"ID":"38b59b38-39e3-4a09-b50e-0cfa9035cd3f","Type":"ContainerStarted","Data":"c0071a0de357236a8a12d80697fc47dd0bdadcd15a086ac0bbefcbb8a580c2e9"} Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.536478 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" event={"ID":"d69403b9-dbd1-4059-afca-e7a907dd2c08","Type":"ContainerStarted","Data":"ac443f66fe0c88f0b4385276cf926ede31dc3b6f24e327d242ec9fe1ce28113d"} Dec 05 19:28:30 crc kubenswrapper[4982]: I1205 19:28:30.537343 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" event={"ID":"085cb474-3881-41ed-b0fa-6a3d237ec343","Type":"ContainerStarted","Data":"d5bf6efc456df580a2e75849d3f47f000d59e9ef2b1d6608e339d605d5944e37"} Dec 05 19:28:30 crc kubenswrapper[4982]: 
I1205 19:28:30.538328 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8f4dcd9df-hs694" event={"ID":"9d8c7eee-debd-49b1-94f3-4b545d83b9fb","Type":"ContainerStarted","Data":"a5b6b2732e5050739925fbc0ab98df0004670ca1a745cf6f0e6dacae55b8fbb6"} Dec 05 19:28:31 crc kubenswrapper[4982]: I1205 19:28:31.546799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-8f4dcd9df-hs694" event={"ID":"9d8c7eee-debd-49b1-94f3-4b545d83b9fb","Type":"ContainerStarted","Data":"237fbe09e98511760e638b3545dc732fbba3afd99a7bbf323aa19c42d112412d"} Dec 05 19:28:31 crc kubenswrapper[4982]: I1205 19:28:31.566073 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-8f4dcd9df-hs694" podStartSLOduration=2.566055616 podStartE2EDuration="2.566055616s" podCreationTimestamp="2025-12-05 19:28:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:28:31.562002594 +0000 UTC m=+890.443888619" watchObservedRunningTime="2025-12-05 19:28:31.566055616 +0000 UTC m=+890.447941601" Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.561210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" event={"ID":"47097c85-dd76-46fd-b837-c5f9e2f5b599","Type":"ContainerStarted","Data":"43b980d5b0e38fec0a35d5201a0ecba70b6ac7e6a2c14235a6fd2e7c59770085"} Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.564077 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-4dnv7" event={"ID":"38b59b38-39e3-4a09-b50e-0cfa9035cd3f","Type":"ContainerStarted","Data":"7be7cf69caed346c2317f97a55218f69631c95907b038c78e4a95624e6004832"} Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.564191 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.569796 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" event={"ID":"d69403b9-dbd1-4059-afca-e7a907dd2c08","Type":"ContainerStarted","Data":"d7c58226789f5e30605ece847468e49ee9f39273b63db404f6c3afe6c3154f60"} Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.574614 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" event={"ID":"085cb474-3881-41ed-b0fa-6a3d237ec343","Type":"ContainerStarted","Data":"b59b99b3fecfe3e76ef6c4b36cc22625295ee204f9305a976d2a400b2b2e13ed"} Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.574809 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-2zqs2" podStartSLOduration=2.276878141 podStartE2EDuration="4.574794017s" podCreationTimestamp="2025-12-05 19:28:29 +0000 UTC" firstStartedPulling="2025-12-05 19:28:30.19255634 +0000 UTC m=+889.074442335" lastFinishedPulling="2025-12-05 19:28:32.490472216 +0000 UTC m=+891.372358211" observedRunningTime="2025-12-05 19:28:33.573205607 +0000 UTC m=+892.455091622" watchObservedRunningTime="2025-12-05 19:28:33.574794017 +0000 UTC m=+892.456680012" Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.574914 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.627185 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-4dnv7" podStartSLOduration=2.076545367 podStartE2EDuration="4.627166591s" podCreationTimestamp="2025-12-05 19:28:29 +0000 UTC" firstStartedPulling="2025-12-05 19:28:29.941252847 +0000 UTC m=+888.823138862" lastFinishedPulling="2025-12-05 19:28:32.491874091 +0000 UTC m=+891.373760086" observedRunningTime="2025-12-05 19:28:33.61877664 +0000 UTC m=+892.500662635" watchObservedRunningTime="2025-12-05 19:28:33.627166591 +0000 UTC m=+892.509052586" Dec 05 19:28:33 crc kubenswrapper[4982]: I1205 19:28:33.640046 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" podStartSLOduration=2.484213573 podStartE2EDuration="4.640026843s" podCreationTimestamp="2025-12-05 19:28:29 +0000 UTC" firstStartedPulling="2025-12-05 19:28:30.33604085 +0000 UTC m=+889.217926845" lastFinishedPulling="2025-12-05 19:28:32.49185412 +0000 UTC m=+891.373740115" observedRunningTime="2025-12-05 19:28:33.631701445 +0000 UTC m=+892.513587450" watchObservedRunningTime="2025-12-05 19:28:33.640026843 +0000 UTC m=+892.521912848" Dec 05 19:28:34 crc kubenswrapper[4982]: I1205 19:28:34.591369 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" event={"ID":"d69403b9-dbd1-4059-afca-e7a907dd2c08","Type":"ContainerStarted","Data":"2855f91ac185c16c5d33d842641445ace0ac2a9a6ad032dd8ddc937edbb0e998"} Dec 05 19:28:34 crc kubenswrapper[4982]: I1205 19:28:34.617953 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-l8vt9" podStartSLOduration=1.553864943 podStartE2EDuration="5.617932394s" podCreationTimestamp="2025-12-05 19:28:29 +0000 UTC" firstStartedPulling="2025-12-05 19:28:30.37035217 +0000 UTC m=+889.252238165" lastFinishedPulling="2025-12-05 19:28:34.434419611 +0000 UTC m=+893.316305616" observedRunningTime="2025-12-05 19:28:34.612133759 +0000 UTC m=+893.494019764" watchObservedRunningTime="2025-12-05 19:28:34.617932394 +0000 UTC m=+893.499818389" Dec 05 19:28:39 crc kubenswrapper[4982]: I1205 19:28:39.925295 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-4dnv7" Dec 05 19:28:40 crc kubenswrapper[4982]: I1205 19:28:40.247540 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:40 crc kubenswrapper[4982]: I1205 19:28:40.247610 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:40 crc kubenswrapper[4982]: I1205 19:28:40.255422 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:40 crc kubenswrapper[4982]: I1205 19:28:40.636764 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-8f4dcd9df-hs694" Dec 05 19:28:40 crc kubenswrapper[4982]: I1205 19:28:40.702700 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:28:49 crc kubenswrapper[4982]: I1205 19:28:49.872915 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-lrtmh" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.395664 4982 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr"] Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.398117 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.400880 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.403143 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr"] Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.462320 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.462649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxrm2\" (UniqueName: \"kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.462745 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.564029 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.564077 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxrm2\" (UniqueName: \"kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.564094 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.564562 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.564711 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.590807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxrm2\" (UniqueName: \"kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:04 crc kubenswrapper[4982]: I1205 19:29:04.727345 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:05 crc kubenswrapper[4982]: I1205 19:29:05.177604 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr"] Dec 05 19:29:05 crc kubenswrapper[4982]: I1205 19:29:05.753393 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-k66n9" podUID="691aea14-6408-453c-b4c1-99e2760ab531" containerName="console" containerID="cri-o://5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f" gracePeriod=15 Dec 05 19:29:05 crc kubenswrapper[4982]: I1205 19:29:05.809657 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerID="04e7d5982dbfc74ba7979b9548b28e5b1e399b06aec44954aabb3b9f9b9805f8" exitCode=0 Dec 05 19:29:05 crc kubenswrapper[4982]: I1205 19:29:05.809747 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerDied","Data":"04e7d5982dbfc74ba7979b9548b28e5b1e399b06aec44954aabb3b9f9b9805f8"} Dec 05 19:29:05 crc kubenswrapper[4982]: I1205 19:29:05.810043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerStarted","Data":"4a035d6080d4539c1a7f965b81ad46739b5be535dcc3eee6d4b84bfe0bff19a9"} Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.124562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-k66n9_691aea14-6408-453c-b4c1-99e2760ab531/console/0.log" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.124639 4982 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.188021 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.188560 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.188768 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.188877 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6zv8z\" (UniqueName: \"kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.188969 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.189130 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.189500 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle\") pod \"691aea14-6408-453c-b4c1-99e2760ab531\" (UID: \"691aea14-6408-453c-b4c1-99e2760ab531\") " Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.189203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca" (OuterVolumeSpecName: "service-ca") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.189504 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.189878 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config" (OuterVolumeSpecName: "console-config") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.190376 4982 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.190543 4982 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.190695 4982 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.190701 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.194332 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.194686 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.194915 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z" (OuterVolumeSpecName: "kube-api-access-6zv8z") pod "691aea14-6408-453c-b4c1-99e2760ab531" (UID: "691aea14-6408-453c-b4c1-99e2760ab531"). InnerVolumeSpecName "kube-api-access-6zv8z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.291706 4982 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.291739 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6zv8z\" (UniqueName: \"kubernetes.io/projected/691aea14-6408-453c-b4c1-99e2760ab531-kube-api-access-6zv8z\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.291749 4982 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/691aea14-6408-453c-b4c1-99e2760ab531-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.291759 4982 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/691aea14-6408-453c-b4c1-99e2760ab531-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.829515 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerStarted","Data":"ff58ed641c774bf5f7395ebfd3e90165e66f92c9e26364524c137575c3ba062f"} Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.831863 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-k66n9_691aea14-6408-453c-b4c1-99e2760ab531/console/0.log" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.831981 4982 generic.go:334] "Generic (PLEG): container finished" podID="691aea14-6408-453c-b4c1-99e2760ab531" containerID="5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f" exitCode=2 Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.832093 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k66n9" event={"ID":"691aea14-6408-453c-b4c1-99e2760ab531","Type":"ContainerDied","Data":"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f"} Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.832234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-k66n9" event={"ID":"691aea14-6408-453c-b4c1-99e2760ab531","Type":"ContainerDied","Data":"1812e682c1ca2b9dbaeabca50bc76a803282695470c33492fe8cf90b2245f879"} Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.832334 4982 scope.go:117] "RemoveContainer" containerID="5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.832414 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-k66n9" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.857410 4982 scope.go:117] "RemoveContainer" containerID="5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f" Dec 05 19:29:06 crc kubenswrapper[4982]: E1205 19:29:06.858310 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f\": container with ID starting with 5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f not found: ID does not exist" containerID="5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.858354 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f"} err="failed to get container status \"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f\": rpc error: code = NotFound desc = could not find container \"5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f\": container with ID starting with 5d0fc87d9475a01067dc4c6817fc9717892b3e1e1f7a1afa429c36aaae136d5f not found: ID does not exist" Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.870461 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:29:06 crc kubenswrapper[4982]: I1205 19:29:06.876319 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-k66n9"] Dec 05 19:29:07 crc kubenswrapper[4982]: I1205 19:29:07.399705 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="691aea14-6408-453c-b4c1-99e2760ab531" path="/var/lib/kubelet/pods/691aea14-6408-453c-b4c1-99e2760ab531/volumes" Dec 05 19:29:07 crc kubenswrapper[4982]: I1205 19:29:07.844259 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerID="ff58ed641c774bf5f7395ebfd3e90165e66f92c9e26364524c137575c3ba062f" exitCode=0 Dec 05 19:29:07 crc kubenswrapper[4982]: I1205 19:29:07.844301 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerDied","Data":"ff58ed641c774bf5f7395ebfd3e90165e66f92c9e26364524c137575c3ba062f"} Dec 05 19:29:08 crc kubenswrapper[4982]: I1205 19:29:08.855660 4982 generic.go:334] "Generic (PLEG): container finished" podID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerID="1547ef61f79e0567f523ae22c168dadd27eacc23a9e53610d47c516dffaa332f" exitCode=0 Dec 05 19:29:08 crc kubenswrapper[4982]: I1205 19:29:08.855703 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerDied","Data":"1547ef61f79e0567f523ae22c168dadd27eacc23a9e53610d47c516dffaa332f"} Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.165266 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.246452 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util\") pod \"6b00ab98-889d-421f-a3f4-1d9bacddb215\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.246549 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxrm2\" (UniqueName: \"kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2\") pod \"6b00ab98-889d-421f-a3f4-1d9bacddb215\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.246577 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle\") pod \"6b00ab98-889d-421f-a3f4-1d9bacddb215\" (UID: \"6b00ab98-889d-421f-a3f4-1d9bacddb215\") " Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.247597 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle" (OuterVolumeSpecName: "bundle") pod "6b00ab98-889d-421f-a3f4-1d9bacddb215" (UID: "6b00ab98-889d-421f-a3f4-1d9bacddb215"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.251454 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2" (OuterVolumeSpecName: "kube-api-access-sxrm2") pod "6b00ab98-889d-421f-a3f4-1d9bacddb215" (UID: "6b00ab98-889d-421f-a3f4-1d9bacddb215"). InnerVolumeSpecName "kube-api-access-sxrm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.270585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util" (OuterVolumeSpecName: "util") pod "6b00ab98-889d-421f-a3f4-1d9bacddb215" (UID: "6b00ab98-889d-421f-a3f4-1d9bacddb215"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.347716 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxrm2\" (UniqueName: \"kubernetes.io/projected/6b00ab98-889d-421f-a3f4-1d9bacddb215-kube-api-access-sxrm2\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.347753 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.347762 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/6b00ab98-889d-421f-a3f4-1d9bacddb215-util\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.874879 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" event={"ID":"6b00ab98-889d-421f-a3f4-1d9bacddb215","Type":"ContainerDied","Data":"4a035d6080d4539c1a7f965b81ad46739b5be535dcc3eee6d4b84bfe0bff19a9"} Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.874938 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a035d6080d4539c1a7f965b81ad46739b5be535dcc3eee6d4b84bfe0bff19a9" Dec 05 19:29:10 crc kubenswrapper[4982]: I1205 19:29:10.874963 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.783910 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4"] Dec 05 19:29:24 crc kubenswrapper[4982]: E1205 19:29:24.785568 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="extract" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.785649 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="extract" Dec 05 19:29:24 crc kubenswrapper[4982]: E1205 19:29:24.785708 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="pull" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.785758 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="pull" Dec 05 19:29:24 crc kubenswrapper[4982]: E1205 19:29:24.785813 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="691aea14-6408-453c-b4c1-99e2760ab531" containerName="console" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.785863 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="691aea14-6408-453c-b4c1-99e2760ab531" containerName="console" Dec 05 19:29:24 crc kubenswrapper[4982]: E1205 19:29:24.785916 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="util" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.785974 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="util" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.786122 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b00ab98-889d-421f-a3f4-1d9bacddb215" containerName="extract" Dec 
05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.786210 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="691aea14-6408-453c-b4c1-99e2760ab531" containerName="console" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.786784 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.789655 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.789992 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.790130 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.790203 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.792302 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-kscjb" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.816753 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4"] Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.930701 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9mtj\" (UniqueName: \"kubernetes.io/projected/4ba9f951-374b-45ba-a5d1-de7393862f1d-kube-api-access-l9mtj\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.931003 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-apiservice-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:24 crc kubenswrapper[4982]: I1205 19:29:24.931141 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-webhook-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.032222 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-webhook-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.032343 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9mtj\" (UniqueName: 
\"kubernetes.io/projected/4ba9f951-374b-45ba-a5d1-de7393862f1d-kube-api-access-l9mtj\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.032397 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-apiservice-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.037797 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-webhook-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.037896 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/4ba9f951-374b-45ba-a5d1-de7393862f1d-apiservice-cert\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.049810 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9mtj\" (UniqueName: \"kubernetes.io/projected/4ba9f951-374b-45ba-a5d1-de7393862f1d-kube-api-access-l9mtj\") pod \"metallb-operator-controller-manager-7d4d6d9964-7tmb4\" (UID: \"4ba9f951-374b-45ba-a5d1-de7393862f1d\") " pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.103310 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.104960 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6"] Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.105749 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.111741 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.111915 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.112051 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zjbq9" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.129653 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6"] Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.239733 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-apiservice-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.239777 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v28wd\" (UniqueName: \"kubernetes.io/projected/858ccc6d-475e-4636-b597-c155973b2e85-kube-api-access-v28wd\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.239850 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-webhook-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.341348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-webhook-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.341432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-apiservice-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.341460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v28wd\" (UniqueName: \"kubernetes.io/projected/858ccc6d-475e-4636-b597-c155973b2e85-kube-api-access-v28wd\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.345805 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-webhook-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.347465 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/858ccc6d-475e-4636-b597-c155973b2e85-apiservice-cert\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.366741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v28wd\" (UniqueName: \"kubernetes.io/projected/858ccc6d-475e-4636-b597-c155973b2e85-kube-api-access-v28wd\") pod \"metallb-operator-webhook-server-8ddfdf549-2zbg6\" (UID: \"858ccc6d-475e-4636-b597-c155973b2e85\") " pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.419911 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.553259 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4"] Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.854627 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6"] Dec 05 19:29:25 crc kubenswrapper[4982]: W1205 19:29:25.857134 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod858ccc6d_475e_4636_b597_c155973b2e85.slice/crio-3e62625880235f507f0d43cb180f6cd51f4430c575535b54e8ec2d5e82593d28 WatchSource:0}: Error finding container 3e62625880235f507f0d43cb180f6cd51f4430c575535b54e8ec2d5e82593d28: Status 404 returned error can't find the container with id 3e62625880235f507f0d43cb180f6cd51f4430c575535b54e8ec2d5e82593d28 Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.979016 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" event={"ID":"858ccc6d-475e-4636-b597-c155973b2e85","Type":"ContainerStarted","Data":"3e62625880235f507f0d43cb180f6cd51f4430c575535b54e8ec2d5e82593d28"} Dec 05 19:29:25 crc kubenswrapper[4982]: I1205 19:29:25.980206 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" event={"ID":"4ba9f951-374b-45ba-a5d1-de7393862f1d","Type":"ContainerStarted","Data":"f6a462c54e72d84bad607961b88ac16663a54a9a6dbd9b49d2319d4e4d02ae31"} Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.144291 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.145989 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.159925 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.270260 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.270510 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwrf8\" (UniqueName: \"kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.270562 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.371728 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.371780 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwrf8\" (UniqueName: \"kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.371831 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.372318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.372711 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.396728 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cwrf8\" (UniqueName: \"kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8\") pod \"community-operators-ffqr9\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.524469 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:27 crc kubenswrapper[4982]: I1205 19:29:27.821916 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:28 crc kubenswrapper[4982]: I1205 19:29:28.020599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerStarted","Data":"6b984e50e6d4ee04d010ceeb36f7184e6175532ab58fb79100c9c957437b444d"} Dec 05 19:29:28 crc kubenswrapper[4982]: I1205 19:29:28.020637 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerStarted","Data":"8a24775666a98515583c7d7a559781af12f6673c5d50999b23d636f8d74cf40d"} Dec 05 19:29:29 crc kubenswrapper[4982]: I1205 19:29:29.050291 4982 generic.go:334] "Generic (PLEG): container finished" podID="3340096c-a963-4bbc-a77a-c064ab263f12" containerID="6b984e50e6d4ee04d010ceeb36f7184e6175532ab58fb79100c9c957437b444d" exitCode=0 Dec 05 19:29:29 crc kubenswrapper[4982]: I1205 19:29:29.050642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerDied","Data":"6b984e50e6d4ee04d010ceeb36f7184e6175532ab58fb79100c9c957437b444d"} Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.076571 4982 generic.go:334] "Generic (PLEG): container finished" podID="3340096c-a963-4bbc-a77a-c064ab263f12" containerID="7d555b4902353c98807ff371443ed91d88f73baefc4fad6aa6881a405670f09c" exitCode=0 Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.076626 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerDied","Data":"7d555b4902353c98807ff371443ed91d88f73baefc4fad6aa6881a405670f09c"} Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.078716 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" event={"ID":"858ccc6d-475e-4636-b597-c155973b2e85","Type":"ContainerStarted","Data":"2b7ba49f23673df6ded133ab0be7fec860194fcf1d187284949b29b025849ca8"} Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.078781 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.080872 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" event={"ID":"4ba9f951-374b-45ba-a5d1-de7393862f1d","Type":"ContainerStarted","Data":"22cf4b114276d99d1444405bdf7ba3becbdeac484141f1e2d6bf44c919d7c9d4"} Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.081792 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.120603 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" podStartSLOduration=1.738282694 podStartE2EDuration="7.120582413s" podCreationTimestamp="2025-12-05 19:29:25 +0000 UTC" firstStartedPulling="2025-12-05 19:29:25.859724955 +0000 UTC m=+944.741610950" lastFinishedPulling="2025-12-05 19:29:31.242024654 +0000 UTC m=+950.123910669" observedRunningTime="2025-12-05 19:29:32.11447879 +0000 UTC m=+950.996364785" watchObservedRunningTime="2025-12-05 19:29:32.120582413 +0000 UTC m=+951.002468408" Dec 05 19:29:32 crc kubenswrapper[4982]: I1205 19:29:32.138335 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" podStartSLOduration=2.499411672 podStartE2EDuration="8.138315668s" podCreationTimestamp="2025-12-05 19:29:24 +0000 UTC" firstStartedPulling="2025-12-05 19:29:25.587737352 +0000 UTC m=+944.469623347" lastFinishedPulling="2025-12-05 19:29:31.226641348 +0000 UTC m=+950.108527343" observedRunningTime="2025-12-05 19:29:32.132902602 +0000 UTC m=+951.014788607" watchObservedRunningTime="2025-12-05 19:29:32.138315668 +0000 UTC m=+951.020201673" Dec 05 19:29:33 crc kubenswrapper[4982]: I1205 19:29:33.092009 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerStarted","Data":"6b4bb241382d2a8e8df491c5057cb32a3b46856f3a5788861f20c8f6fe776fa2"} Dec 05 19:29:33 crc kubenswrapper[4982]: I1205 19:29:33.113750 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ffqr9" podStartSLOduration=1.625742962 podStartE2EDuration="6.113735297s" podCreationTimestamp="2025-12-05 19:29:27 +0000 UTC" firstStartedPulling="2025-12-05 19:29:28.021799232 +0000 UTC m=+946.903685227" lastFinishedPulling="2025-12-05 19:29:32.509791567 +0000 UTC m=+951.391677562" observedRunningTime="2025-12-05 19:29:33.111926632 +0000 UTC m=+951.993812677" watchObservedRunningTime="2025-12-05 19:29:33.113735297 +0000 UTC m=+951.995621292" Dec 05 19:29:37 crc kubenswrapper[4982]: I1205 19:29:37.525555 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:37 crc kubenswrapper[4982]: I1205 19:29:37.526038 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:37 crc kubenswrapper[4982]: I1205 19:29:37.580540 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:38 crc kubenswrapper[4982]: I1205 19:29:38.246629 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:40 crc kubenswrapper[4982]: I1205 19:29:40.618492 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:40 crc kubenswrapper[4982]: I1205 19:29:40.618907 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ffqr9" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="registry-server" 
containerID="cri-o://6b4bb241382d2a8e8df491c5057cb32a3b46856f3a5788861f20c8f6fe776fa2" gracePeriod=2 Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.150818 4982 generic.go:334] "Generic (PLEG): container finished" podID="3340096c-a963-4bbc-a77a-c064ab263f12" containerID="6b4bb241382d2a8e8df491c5057cb32a3b46856f3a5788861f20c8f6fe776fa2" exitCode=0 Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.150872 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerDied","Data":"6b4bb241382d2a8e8df491c5057cb32a3b46856f3a5788861f20c8f6fe776fa2"} Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.603427 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.672766 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content\") pod \"3340096c-a963-4bbc-a77a-c064ab263f12\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.672866 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cwrf8\" (UniqueName: \"kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8\") pod \"3340096c-a963-4bbc-a77a-c064ab263f12\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.672947 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities\") pod \"3340096c-a963-4bbc-a77a-c064ab263f12\" (UID: \"3340096c-a963-4bbc-a77a-c064ab263f12\") " Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.673729 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities" (OuterVolumeSpecName: "utilities") pod "3340096c-a963-4bbc-a77a-c064ab263f12" (UID: "3340096c-a963-4bbc-a77a-c064ab263f12"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.679192 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8" (OuterVolumeSpecName: "kube-api-access-cwrf8") pod "3340096c-a963-4bbc-a77a-c064ab263f12" (UID: "3340096c-a963-4bbc-a77a-c064ab263f12"). InnerVolumeSpecName "kube-api-access-cwrf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.733750 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3340096c-a963-4bbc-a77a-c064ab263f12" (UID: "3340096c-a963-4bbc-a77a-c064ab263f12"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.774626 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cwrf8\" (UniqueName: \"kubernetes.io/projected/3340096c-a963-4bbc-a77a-c064ab263f12-kube-api-access-cwrf8\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.774686 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:41 crc kubenswrapper[4982]: I1205 19:29:41.774699 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3340096c-a963-4bbc-a77a-c064ab263f12-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.159326 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ffqr9" event={"ID":"3340096c-a963-4bbc-a77a-c064ab263f12","Type":"ContainerDied","Data":"8a24775666a98515583c7d7a559781af12f6673c5d50999b23d636f8d74cf40d"} Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.159393 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ffqr9" Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.159414 4982 scope.go:117] "RemoveContainer" containerID="6b4bb241382d2a8e8df491c5057cb32a3b46856f3a5788861f20c8f6fe776fa2" Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.175250 4982 scope.go:117] "RemoveContainer" containerID="7d555b4902353c98807ff371443ed91d88f73baefc4fad6aa6881a405670f09c" Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.188247 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.192738 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ffqr9"] Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.194263 4982 scope.go:117] "RemoveContainer" containerID="6b984e50e6d4ee04d010ceeb36f7184e6175532ab58fb79100c9c957437b444d" Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.556554 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:29:42 crc kubenswrapper[4982]: I1205 19:29:42.556605 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:29:43 crc kubenswrapper[4982]: I1205 19:29:43.403829 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" path="/var/lib/kubelet/pods/3340096c-a963-4bbc-a77a-c064ab263f12/volumes" Dec 05 19:29:45 crc kubenswrapper[4982]: I1205 19:29:45.428179 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-8ddfdf549-2zbg6" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.239072 4982 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:29:48 crc kubenswrapper[4982]: E1205 19:29:48.239703 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="extract-utilities" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.239719 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="extract-utilities" Dec 05 19:29:48 crc kubenswrapper[4982]: E1205 19:29:48.239757 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="extract-content" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.239766 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="extract-content" Dec 05 19:29:48 crc kubenswrapper[4982]: E1205 19:29:48.239778 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="registry-server" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.239786 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="registry-server" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.239916 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3340096c-a963-4bbc-a77a-c064ab263f12" containerName="registry-server" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.241011 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.248587 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.358852 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7hvl\" (UniqueName: \"kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.359238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.359266 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.460464 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7hvl\" (UniqueName: \"kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc 
kubenswrapper[4982]: I1205 19:29:48.460542 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.460567 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.460983 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.461038 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.481547 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7hvl\" (UniqueName: \"kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl\") pod \"certified-operators-22r55\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:48 crc kubenswrapper[4982]: I1205 19:29:48.619861 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:49 crc kubenswrapper[4982]: I1205 19:29:49.075562 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:29:49 crc kubenswrapper[4982]: W1205 19:29:49.080000 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc221a94b_b76b_45dd_b78c_132ea9270b88.slice/crio-e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08 WatchSource:0}: Error finding container e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08: Status 404 returned error can't find the container with id e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08 Dec 05 19:29:49 crc kubenswrapper[4982]: I1205 19:29:49.203335 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerStarted","Data":"e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08"} Dec 05 19:29:50 crc kubenswrapper[4982]: I1205 19:29:50.211123 4982 generic.go:334] "Generic (PLEG): container finished" podID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerID="32cb2d1388ca574f7983e485b64bb82922046870d837981772cdbd71426c780a" exitCode=0 Dec 05 19:29:50 crc kubenswrapper[4982]: I1205 19:29:50.211197 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerDied","Data":"32cb2d1388ca574f7983e485b64bb82922046870d837981772cdbd71426c780a"} Dec 05 19:29:51 crc kubenswrapper[4982]: I1205 19:29:51.220263 4982 generic.go:334] "Generic (PLEG): container finished" podID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerID="c71621a537833cf68acfd24b611e3de09fb0b72fc693901f85097a8ca2a6aa3f" exitCode=0 Dec 05 19:29:51 crc kubenswrapper[4982]: I1205 19:29:51.220349 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerDied","Data":"c71621a537833cf68acfd24b611e3de09fb0b72fc693901f85097a8ca2a6aa3f"} Dec 05 19:29:52 crc kubenswrapper[4982]: I1205 19:29:52.227879 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerStarted","Data":"17e0a975942ff2ade2d8b7c24fb58353db92fa31cfb1b7b44701cb6867944358"} Dec 05 19:29:52 crc kubenswrapper[4982]: I1205 19:29:52.244836 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-22r55" podStartSLOduration=2.875055614 podStartE2EDuration="4.244822736s" podCreationTimestamp="2025-12-05 19:29:48 +0000 UTC" firstStartedPulling="2025-12-05 19:29:50.213573701 +0000 UTC m=+969.095459696" lastFinishedPulling="2025-12-05 19:29:51.583340823 +0000 UTC m=+970.465226818" observedRunningTime="2025-12-05 19:29:52.243657297 +0000 UTC m=+971.125543282" watchObservedRunningTime="2025-12-05 19:29:52.244822736 +0000 UTC m=+971.126708731" Dec 05 19:29:58 crc kubenswrapper[4982]: I1205 19:29:58.621212 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:58 crc kubenswrapper[4982]: I1205 19:29:58.621633 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:58 crc kubenswrapper[4982]: I1205 19:29:58.710453 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:29:59 crc kubenswrapper[4982]: I1205 19:29:59.305092 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.161941 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28"] Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.162944 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.165202 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.169284 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.169912 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28"] Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.212459 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.212956 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpx8k\" (UniqueName: \"kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.213174 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.313995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpx8k\" (UniqueName: \"kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.314409 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.314589 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.315214 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.330981 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.340481 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpx8k\" (UniqueName: \"kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k\") pod \"collect-profiles-29416050-m4f28\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.481416 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:00 crc kubenswrapper[4982]: I1205 19:30:00.917849 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28"] Dec 05 19:30:01 crc kubenswrapper[4982]: I1205 19:30:01.218331 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:30:01 crc kubenswrapper[4982]: I1205 19:30:01.281935 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" event={"ID":"33b1f837-d120-4042-9b3d-5b31342d5ebc","Type":"ContainerStarted","Data":"ce13b31f04444cee316e1adc2f502544067896dc354bd2fbf3bb33baa1e6ebdf"} Dec 05 19:30:01 crc kubenswrapper[4982]: I1205 19:30:01.282479 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-22r55" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="registry-server" containerID="cri-o://17e0a975942ff2ade2d8b7c24fb58353db92fa31cfb1b7b44701cb6867944358" gracePeriod=2 Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.288794 4982 generic.go:334] "Generic (PLEG): container finished" podID="33b1f837-d120-4042-9b3d-5b31342d5ebc" containerID="ae422ad15b136efb8454598c3111d2668dff23e783585be1e0158da0ac726975" exitCode=0 Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.288888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" event={"ID":"33b1f837-d120-4042-9b3d-5b31342d5ebc","Type":"ContainerDied","Data":"ae422ad15b136efb8454598c3111d2668dff23e783585be1e0158da0ac726975"} Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.291953 4982 generic.go:334] "Generic (PLEG): container finished" podID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerID="17e0a975942ff2ade2d8b7c24fb58353db92fa31cfb1b7b44701cb6867944358" exitCode=0 Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.291996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerDied","Data":"17e0a975942ff2ade2d8b7c24fb58353db92fa31cfb1b7b44701cb6867944358"} Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.292025 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-22r55" event={"ID":"c221a94b-b76b-45dd-b78c-132ea9270b88","Type":"ContainerDied","Data":"e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08"} Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.292040 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e66ddaa317bf44db2b6a6ea9805f6905266aa47d4f4a8cb99315c6027ddebd08" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.309542 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.344937 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content\") pod \"c221a94b-b76b-45dd-b78c-132ea9270b88\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.344986 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7hvl\" (UniqueName: \"kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl\") pod \"c221a94b-b76b-45dd-b78c-132ea9270b88\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.345034 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities\") pod \"c221a94b-b76b-45dd-b78c-132ea9270b88\" (UID: \"c221a94b-b76b-45dd-b78c-132ea9270b88\") " Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.345836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities" (OuterVolumeSpecName: "utilities") pod "c221a94b-b76b-45dd-b78c-132ea9270b88" (UID: "c221a94b-b76b-45dd-b78c-132ea9270b88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.346021 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.351071 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl" (OuterVolumeSpecName: "kube-api-access-v7hvl") pod "c221a94b-b76b-45dd-b78c-132ea9270b88" (UID: "c221a94b-b76b-45dd-b78c-132ea9270b88"). InnerVolumeSpecName "kube-api-access-v7hvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.389220 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c221a94b-b76b-45dd-b78c-132ea9270b88" (UID: "c221a94b-b76b-45dd-b78c-132ea9270b88"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.447660 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c221a94b-b76b-45dd-b78c-132ea9270b88-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:02 crc kubenswrapper[4982]: I1205 19:30:02.447696 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7hvl\" (UniqueName: \"kubernetes.io/projected/c221a94b-b76b-45dd-b78c-132ea9270b88-kube-api-access-v7hvl\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.297226 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-22r55" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.325367 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.331424 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-22r55"] Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.399395 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" path="/var/lib/kubelet/pods/c221a94b-b76b-45dd-b78c-132ea9270b88/volumes" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.541409 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.560468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume\") pod \"33b1f837-d120-4042-9b3d-5b31342d5ebc\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.560621 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpx8k\" (UniqueName: \"kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k\") pod \"33b1f837-d120-4042-9b3d-5b31342d5ebc\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.560645 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume\") pod \"33b1f837-d120-4042-9b3d-5b31342d5ebc\" (UID: \"33b1f837-d120-4042-9b3d-5b31342d5ebc\") " Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.561288 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume" (OuterVolumeSpecName: "config-volume") pod "33b1f837-d120-4042-9b3d-5b31342d5ebc" (UID: "33b1f837-d120-4042-9b3d-5b31342d5ebc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.567276 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "33b1f837-d120-4042-9b3d-5b31342d5ebc" (UID: "33b1f837-d120-4042-9b3d-5b31342d5ebc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.567428 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k" (OuterVolumeSpecName: "kube-api-access-tpx8k") pod "33b1f837-d120-4042-9b3d-5b31342d5ebc" (UID: "33b1f837-d120-4042-9b3d-5b31342d5ebc"). InnerVolumeSpecName "kube-api-access-tpx8k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.662237 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpx8k\" (UniqueName: \"kubernetes.io/projected/33b1f837-d120-4042-9b3d-5b31342d5ebc-kube-api-access-tpx8k\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.662283 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/33b1f837-d120-4042-9b3d-5b31342d5ebc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:03 crc kubenswrapper[4982]: I1205 19:30:03.662296 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/33b1f837-d120-4042-9b3d-5b31342d5ebc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:30:04 crc kubenswrapper[4982]: I1205 19:30:04.304723 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" Dec 05 19:30:04 crc kubenswrapper[4982]: I1205 19:30:04.304644 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28" event={"ID":"33b1f837-d120-4042-9b3d-5b31342d5ebc","Type":"ContainerDied","Data":"ce13b31f04444cee316e1adc2f502544067896dc354bd2fbf3bb33baa1e6ebdf"} Dec 05 19:30:04 crc kubenswrapper[4982]: I1205 19:30:04.305277 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce13b31f04444cee316e1adc2f502544067896dc354bd2fbf3bb33baa1e6ebdf" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.106804 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7d4d6d9964-7tmb4" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.819979 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-pfpc6"] Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.820380 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="registry-server" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820397 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="registry-server" Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.820414 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="extract-content" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820422 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="extract-content" Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.820439 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="extract-utilities" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820447 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="extract-utilities" Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.820465 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b1f837-d120-4042-9b3d-5b31342d5ebc" containerName="collect-profiles" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820472 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="33b1f837-d120-4042-9b3d-5b31342d5ebc" containerName="collect-profiles" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820609 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c221a94b-b76b-45dd-b78c-132ea9270b88" containerName="registry-server" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.820622 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="33b1f837-d120-4042-9b3d-5b31342d5ebc" containerName="collect-profiles" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.823184 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.825161 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.825386 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-9qvnp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.825827 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.826294 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp"] Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.827262 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.829387 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.832889 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp"] Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.889955 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-conf\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890375 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-reloader\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890420 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890464 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cfct7\" (UniqueName: \"kubernetes.io/projected/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-kube-api-access-cfct7\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890496 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-startup\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfjhs\" (UniqueName: \"kubernetes.io/projected/de9c5641-2450-4451-ab4e-dc16a34a094c-kube-api-access-bfjhs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890547 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-sockets\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.890574 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.904535 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-68bx7"] Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.905398 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-68bx7" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.908084 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.908324 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-485pw" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.908561 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.908886 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.938567 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-f2lxm"] Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.939441 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.941241 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.953618 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-f2lxm"] Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991608 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991665 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-reloader\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991702 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-cert\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991764 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991791 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991815 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991884 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cfct7\" (UniqueName: \"kubernetes.io/projected/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-kube-api-access-cfct7\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991928 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-startup\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991962 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfjhs\" (UniqueName: \"kubernetes.io/projected/de9c5641-2450-4451-ab4e-dc16a34a094c-kube-api-access-bfjhs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.991983 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-sockets\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992016 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9d70d003-cbaf-4f61-b929-2803e9789657-metallb-excludel2\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992041 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992068 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-conf\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992094 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-reloader\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69p7r\" (UniqueName: \"kubernetes.io/projected/9d70d003-cbaf-4f61-b929-2803e9789657-kube-api-access-69p7r\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.991928 4982 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992178 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d5ps\" (UniqueName: \"kubernetes.io/projected/e5934f26-82dc-4376-9073-0d24a57aadb1-kube-api-access-9d5ps\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.992210 4982 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert podName:8ad46eb3-de5d-4122-82fe-5cf11faf01bc nodeName:}" failed. No retries permitted until 2025-12-05 19:30:06.492192999 +0000 UTC m=+985.374078994 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert") pod "frr-k8s-webhook-server-7fcb986d4-t9drp" (UID: "8ad46eb3-de5d-4122-82fe-5cf11faf01bc") : secret "frr-k8s-webhook-server-cert" not found Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.992223 4982 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 05 19:30:05 crc kubenswrapper[4982]: E1205 19:30:05.992266 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs podName:de9c5641-2450-4451-ab4e-dc16a34a094c nodeName:}" failed. No retries permitted until 2025-12-05 19:30:06.492251991 +0000 UTC m=+985.374137986 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs") pod "frr-k8s-pfpc6" (UID: "de9c5641-2450-4451-ab4e-dc16a34a094c") : secret "frr-k8s-certs-secret" not found Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992477 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-conf\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992490 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-sockets\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:05 crc kubenswrapper[4982]: I1205 19:30:05.992950 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/de9c5641-2450-4451-ab4e-dc16a34a094c-frr-startup\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.010833 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfjhs\" (UniqueName: \"kubernetes.io/projected/de9c5641-2450-4451-ab4e-dc16a34a094c-kube-api-access-bfjhs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.017691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cfct7\" (UniqueName: \"kubernetes.io/projected/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-kube-api-access-cfct7\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.092862 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9d70d003-cbaf-4f61-b929-2803e9789657-metallb-excludel2\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.092942 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69p7r\" (UniqueName: \"kubernetes.io/projected/9d70d003-cbaf-4f61-b929-2803e9789657-kube-api-access-69p7r\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.092971 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d5ps\" (UniqueName: \"kubernetes.io/projected/e5934f26-82dc-4376-9073-0d24a57aadb1-kube-api-access-9d5ps\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.092993 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.093018 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-cert\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.093044 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.093076 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093206 4982 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093260 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist podName:9d70d003-cbaf-4f61-b929-2803e9789657 nodeName:}" failed. No retries permitted until 2025-12-05 19:30:06.59324388 +0000 UTC m=+985.475129875 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist") pod "speaker-68bx7" (UID: "9d70d003-cbaf-4f61-b929-2803e9789657") : secret "metallb-memberlist" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093541 4982 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093581 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs podName:9d70d003-cbaf-4f61-b929-2803e9789657 nodeName:}" failed. No retries permitted until 2025-12-05 19:30:06.593570459 +0000 UTC m=+985.475456454 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs") pod "speaker-68bx7" (UID: "9d70d003-cbaf-4f61-b929-2803e9789657") : secret "speaker-certs-secret" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093672 4982 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.093759 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs podName:e5934f26-82dc-4376-9073-0d24a57aadb1 nodeName:}" failed. No retries permitted until 2025-12-05 19:30:06.593741553 +0000 UTC m=+985.475627648 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs") pod "controller-f8648f98b-f2lxm" (UID: "e5934f26-82dc-4376-9073-0d24a57aadb1") : secret "controller-certs-secret" not found Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.094074 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/9d70d003-cbaf-4f61-b929-2803e9789657-metallb-excludel2\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.094697 4982 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.106385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-cert\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.109874 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69p7r\" (UniqueName: \"kubernetes.io/projected/9d70d003-cbaf-4f61-b929-2803e9789657-kube-api-access-69p7r\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.110814 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d5ps\" (UniqueName: \"kubernetes.io/projected/e5934f26-82dc-4376-9073-0d24a57aadb1-kube-api-access-9d5ps\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 
05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.497855 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.497992 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.501737 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de9c5641-2450-4451-ab4e-dc16a34a094c-metrics-certs\") pod \"frr-k8s-pfpc6\" (UID: \"de9c5641-2450-4451-ab4e-dc16a34a094c\") " pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.502284 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8ad46eb3-de5d-4122-82fe-5cf11faf01bc-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-t9drp\" (UID: \"8ad46eb3-de5d-4122-82fe-5cf11faf01bc\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.599006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.599258 4982 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 19:30:06 crc kubenswrapper[4982]: E1205 19:30:06.599436 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist podName:9d70d003-cbaf-4f61-b929-2803e9789657 nodeName:}" failed. No retries permitted until 2025-12-05 19:30:07.59941843 +0000 UTC m=+986.481304425 (durationBeforeRetry 1s). 
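Note the retry spacing: the failures at 19:30:05–06 are retried after durationBeforeRetry 500ms, while the memberlist failure at 19:30:06.599 is retried after 1s; nestedpendingoperations doubles the delay on each consecutive failure of the same operation. A toy reproduction of that schedule (the upper bound is an assumption for illustration, not something shown in this log):

    // Sketch of the exponential backoff visible in the durationBeforeRetry
    // values above (500ms, then 1s).
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 500 * time.Millisecond
        maxDelay := 2*time.Minute + 2*time.Second // assumed cap
        for attempt := 1; attempt <= 5; attempt++ {
            fmt.Printf("attempt %d: no retries permitted for %v\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }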
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist") pod "speaker-68bx7" (UID: "9d70d003-cbaf-4f61-b929-2803e9789657") : secret "metallb-memberlist" not found Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.599572 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.599734 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.602663 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-metrics-certs\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.603374 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e5934f26-82dc-4376-9073-0d24a57aadb1-metrics-certs\") pod \"controller-f8648f98b-f2lxm\" (UID: \"e5934f26-82dc-4376-9073-0d24a57aadb1\") " pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.742195 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.756044 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:06 crc kubenswrapper[4982]: I1205 19:30:06.854572 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.002872 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp"] Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.155076 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-f2lxm"] Dec 05 19:30:07 crc kubenswrapper[4982]: W1205 19:30:07.160274 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5934f26_82dc_4376_9073_0d24a57aadb1.slice/crio-708c22adee8fd579e041361af3abe38db07f21529f145393c58c9a6f1f21d511 WatchSource:0}: Error finding container 708c22adee8fd579e041361af3abe38db07f21529f145393c58c9a6f1f21d511: Status 404 returned error can't find the container with id 708c22adee8fd579e041361af3abe38db07f21529f145393c58c9a6f1f21d511 Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.321267 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-f2lxm" event={"ID":"e5934f26-82dc-4376-9073-0d24a57aadb1","Type":"ContainerStarted","Data":"0c5fe9418d7e0a4558bcdd4a1662319dc946973968b0f2550fc8e4ed6fd4bfa6"} Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.321610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-f2lxm" event={"ID":"e5934f26-82dc-4376-9073-0d24a57aadb1","Type":"ContainerStarted","Data":"708c22adee8fd579e041361af3abe38db07f21529f145393c58c9a6f1f21d511"} Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.322299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" event={"ID":"8ad46eb3-de5d-4122-82fe-5cf11faf01bc","Type":"ContainerStarted","Data":"5604b0bb7f1abd95ba0119546bb00e25ed62787349e665b819ec9d40046eef7a"} Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.323054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"5a70be0804843f3037a173984b591004403756a8745a529895f96c700fdc2214"} Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.612828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.620602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/9d70d003-cbaf-4f61-b929-2803e9789657-memberlist\") pod \"speaker-68bx7\" (UID: \"9d70d003-cbaf-4f61-b929-2803e9789657\") " pod="metallb-system/speaker-68bx7" Dec 05 19:30:07 crc kubenswrapper[4982]: I1205 19:30:07.719986 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-68bx7" Dec 05 19:30:07 crc kubenswrapper[4982]: W1205 19:30:07.740200 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d70d003_cbaf_4f61_b929_2803e9789657.slice/crio-8d47441b927b7ed2028ee5e7e66f6c32d26b5ead72f2ab2af6ec1742727544d2 WatchSource:0}: Error finding container 8d47441b927b7ed2028ee5e7e66f6c32d26b5ead72f2ab2af6ec1742727544d2: Status 404 returned error can't find the container with id 8d47441b927b7ed2028ee5e7e66f6c32d26b5ead72f2ab2af6ec1742727544d2 Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.337298 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-68bx7" event={"ID":"9d70d003-cbaf-4f61-b929-2803e9789657","Type":"ContainerStarted","Data":"c74b252e008c2c32763f3acde4beb7e714d4a21e174abbd64d553ac5af28ad7e"} Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.337598 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-68bx7" event={"ID":"9d70d003-cbaf-4f61-b929-2803e9789657","Type":"ContainerStarted","Data":"ca9916806041f64896c6607c63b94399662f55e3dac93b2c02ab257a62139aa6"} Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.337607 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-68bx7" event={"ID":"9d70d003-cbaf-4f61-b929-2803e9789657","Type":"ContainerStarted","Data":"8d47441b927b7ed2028ee5e7e66f6c32d26b5ead72f2ab2af6ec1742727544d2"} Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.337799 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-68bx7" Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.339890 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-f2lxm" event={"ID":"e5934f26-82dc-4376-9073-0d24a57aadb1","Type":"ContainerStarted","Data":"1c2a1568055c68d1e9c93e86dd676fe65b654fb4b96c59410196d5e79d348795"} Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.340326 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:08 crc kubenswrapper[4982]: I1205 19:30:08.369654 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-68bx7" podStartSLOduration=3.369636897 podStartE2EDuration="3.369636897s" podCreationTimestamp="2025-12-05 19:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:30:08.352855025 +0000 UTC m=+987.234741030" watchObservedRunningTime="2025-12-05 19:30:08.369636897 +0000 UTC m=+987.251522902" Dec 05 19:30:11 crc kubenswrapper[4982]: I1205 19:30:11.418858 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-f2lxm" podStartSLOduration=6.418835829 podStartE2EDuration="6.418835829s" podCreationTimestamp="2025-12-05 19:30:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:30:08.373460433 +0000 UTC m=+987.255346428" watchObservedRunningTime="2025-12-05 19:30:11.418835829 +0000 UTC m=+990.300721844" Dec 05 19:30:12 crc kubenswrapper[4982]: I1205 19:30:12.556809 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:30:12 crc kubenswrapper[4982]: I1205 19:30:12.556869 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:30:14 crc kubenswrapper[4982]: I1205 19:30:14.383226 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" event={"ID":"8ad46eb3-de5d-4122-82fe-5cf11faf01bc","Type":"ContainerStarted","Data":"72165e2dd27d1901d1a04be1a7d34e264159a5315bd29279a7b2c856b23680ae"} Dec 05 19:30:14 crc kubenswrapper[4982]: I1205 19:30:14.383996 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:14 crc kubenswrapper[4982]: I1205 19:30:14.404534 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" podStartSLOduration=2.91287613 podStartE2EDuration="9.404503203s" podCreationTimestamp="2025-12-05 19:30:05 +0000 UTC" firstStartedPulling="2025-12-05 19:30:07.029016783 +0000 UTC m=+985.910902778" lastFinishedPulling="2025-12-05 19:30:13.520643816 +0000 UTC m=+992.402529851" observedRunningTime="2025-12-05 19:30:14.401257041 +0000 UTC m=+993.283143046" watchObservedRunningTime="2025-12-05 19:30:14.404503203 +0000 UTC m=+993.286389268" Dec 05 19:30:17 crc kubenswrapper[4982]: I1205 19:30:17.405056 4982 generic.go:334] "Generic (PLEG): container finished" podID="de9c5641-2450-4451-ab4e-dc16a34a094c" containerID="16add46b5af6d7d1e83743ce255bdaba6c97cdac8157aa3a6f8a879db17c5386" exitCode=0 Dec 05 19:30:17 crc kubenswrapper[4982]: I1205 19:30:17.410321 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerDied","Data":"16add46b5af6d7d1e83743ce255bdaba6c97cdac8157aa3a6f8a879db17c5386"} Dec 05 19:30:18 crc kubenswrapper[4982]: I1205 19:30:18.412442 4982 generic.go:334] "Generic (PLEG): container finished" podID="de9c5641-2450-4451-ab4e-dc16a34a094c" containerID="b6e04195f0351d334f1d850bade27994987404403cf8a5abb7908754e47d66b9" exitCode=0 Dec 05 19:30:18 crc kubenswrapper[4982]: I1205 19:30:18.412494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerDied","Data":"b6e04195f0351d334f1d850bade27994987404403cf8a5abb7908754e47d66b9"} Dec 05 19:30:19 crc kubenswrapper[4982]: I1205 19:30:19.425593 4982 generic.go:334] "Generic (PLEG): container finished" podID="de9c5641-2450-4451-ab4e-dc16a34a094c" containerID="4f924badde2c367a40a5bd51271b8b9319820269f38051b9ea558edd235c7a4e" exitCode=0 Dec 05 19:30:19 crc kubenswrapper[4982]: I1205 19:30:19.425651 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerDied","Data":"4f924badde2c367a40a5bd51271b8b9319820269f38051b9ea558edd235c7a4e"} Dec 05 19:30:20 crc kubenswrapper[4982]: I1205 19:30:20.437417 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" 
event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"8deef605274c865a03a56a0fc85e84eab503bb3ac891ebc94e50ffd49dfd2e9d"} Dec 05 19:30:20 crc kubenswrapper[4982]: I1205 19:30:20.437710 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"15df241925be3c5fdd31b3cc8f989f07939c0c2e46907a56cb2add8255649b50"} Dec 05 19:30:20 crc kubenswrapper[4982]: I1205 19:30:20.437720 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"e98fed70eda4498cd31b5832ca8841a7d84fad853a3716e21bfde87016d9b6fc"} Dec 05 19:30:20 crc kubenswrapper[4982]: I1205 19:30:20.437728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"f1a02669907c87d973564bbe1f7bd696104a1e97c02ebfeca7e6a15de12aee10"} Dec 05 19:30:20 crc kubenswrapper[4982]: I1205 19:30:20.437736 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"8a8721d4fbd87b5c1160a28684d831daa1c7eb4d9887405f78830c07a3dc6602"} Dec 05 19:30:21 crc kubenswrapper[4982]: I1205 19:30:21.462783 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-pfpc6" event={"ID":"de9c5641-2450-4451-ab4e-dc16a34a094c","Type":"ContainerStarted","Data":"ca113e59d84dea0fb8b8852e22d335f3074cfd47b33b050785251a187fcf5bd1"} Dec 05 19:30:21 crc kubenswrapper[4982]: I1205 19:30:21.463009 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:21 crc kubenswrapper[4982]: I1205 19:30:21.494233 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-pfpc6" podStartSLOduration=6.677848322 podStartE2EDuration="16.494207426s" podCreationTimestamp="2025-12-05 19:30:05 +0000 UTC" firstStartedPulling="2025-12-05 19:30:06.905432815 +0000 UTC m=+985.787318810" lastFinishedPulling="2025-12-05 19:30:16.721791919 +0000 UTC m=+995.603677914" observedRunningTime="2025-12-05 19:30:21.49001214 +0000 UTC m=+1000.371898145" watchObservedRunningTime="2025-12-05 19:30:21.494207426 +0000 UTC m=+1000.376093431" Dec 05 19:30:21 crc kubenswrapper[4982]: I1205 19:30:21.742868 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:21 crc kubenswrapper[4982]: I1205 19:30:21.806629 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:26 crc kubenswrapper[4982]: I1205 19:30:26.763586 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-t9drp" Dec 05 19:30:26 crc kubenswrapper[4982]: I1205 19:30:26.861084 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-f2lxm" Dec 05 19:30:27 crc kubenswrapper[4982]: I1205 19:30:27.733138 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-68bx7" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.026183 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-2f2gp"] Dec 05 19:30:34 crc 
kubenswrapper[4982]: I1205 19:30:34.027663 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.030284 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.031340 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-bdtl9" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.038024 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.039127 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2f2gp"] Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.039798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-476sf\" (UniqueName: \"kubernetes.io/projected/c5e545c3-45c4-4c76-a6cf-e45385919b9d-kube-api-access-476sf\") pod \"openstack-operator-index-2f2gp\" (UID: \"c5e545c3-45c4-4c76-a6cf-e45385919b9d\") " pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.141596 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-476sf\" (UniqueName: \"kubernetes.io/projected/c5e545c3-45c4-4c76-a6cf-e45385919b9d-kube-api-access-476sf\") pod \"openstack-operator-index-2f2gp\" (UID: \"c5e545c3-45c4-4c76-a6cf-e45385919b9d\") " pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.159550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-476sf\" (UniqueName: \"kubernetes.io/projected/c5e545c3-45c4-4c76-a6cf-e45385919b9d-kube-api-access-476sf\") pod \"openstack-operator-index-2f2gp\" (UID: \"c5e545c3-45c4-4c76-a6cf-e45385919b9d\") " pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.352673 4982 util.go:30] "No sandbox for pod can be found. 
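The pod_startup_latency_tracker entries above are internally consistent: podStartSLOduration is the end-to-end startup time minus time spent pulling images, computed on the monotonic clock (the m=+… offsets). Checking the frr-k8s-webhook-server figures:

    // Worked check of "Observed pod startup duration" for
    // frr-k8s-webhook-server-7fcb986d4-t9drp, using the m=+... offsets above:
    // SLO duration = E2E duration - image pull time.
    package main

    import "fmt"

    func main() {
        const (
            e2e       = 9.404503203   // podStartE2EDuration, seconds
            firstPull = 985.910902778 // firstStartedPulling, m=+ offset in seconds
            lastPull  = 992.402529851 // lastFinishedPulling, m=+ offset in seconds
        )
        fmt.Printf("podStartSLOduration = %.9f s\n", e2e-(lastPull-firstPull)) // 2.912876130
    }

The frr-k8s-pfpc6 entry checks out the same way: 16.494207426 - (995.603677914 - 985.787318810) = 6.677848322. Pods whose pull timestamps are the zero time (0001-01-01), like speaker-68bx7, report SLO equal to E2E because no pull time is subtracted.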
Need to start a new one" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:34 crc kubenswrapper[4982]: I1205 19:30:34.853904 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-2f2gp"] Dec 05 19:30:35 crc kubenswrapper[4982]: I1205 19:30:35.784648 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2f2gp" event={"ID":"c5e545c3-45c4-4c76-a6cf-e45385919b9d","Type":"ContainerStarted","Data":"e2b8a81e8d61c44444ac4078e90f52fa3ef08d7ea266478813c11f17222ac277"} Dec 05 19:30:36 crc kubenswrapper[4982]: I1205 19:30:36.747265 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-pfpc6" Dec 05 19:30:37 crc kubenswrapper[4982]: I1205 19:30:37.799931 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-2f2gp" event={"ID":"c5e545c3-45c4-4c76-a6cf-e45385919b9d","Type":"ContainerStarted","Data":"9a4f12b4129fe583f532af520b852868eaad0a3fd781a1db58628dcaaf570f43"} Dec 05 19:30:37 crc kubenswrapper[4982]: I1205 19:30:37.836066 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-2f2gp" podStartSLOduration=1.61187432 podStartE2EDuration="3.836013863s" podCreationTimestamp="2025-12-05 19:30:34 +0000 UTC" firstStartedPulling="2025-12-05 19:30:34.863141771 +0000 UTC m=+1013.745027756" lastFinishedPulling="2025-12-05 19:30:37.087281284 +0000 UTC m=+1015.969167299" observedRunningTime="2025-12-05 19:30:37.81243615 +0000 UTC m=+1016.694322175" watchObservedRunningTime="2025-12-05 19:30:37.836013863 +0000 UTC m=+1016.717899848" Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.557497 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.557850 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.557900 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.558619 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.558687 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e" gracePeriod=600 Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 
19:30:42.841384 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e" exitCode=0 Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.841597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e"} Dec 05 19:30:42 crc kubenswrapper[4982]: I1205 19:30:42.841650 4982 scope.go:117] "RemoveContainer" containerID="57e1b3de34ce27c53857e515661add0f1cc0b0ab673485f39e540b3920f02c13" Dec 05 19:30:43 crc kubenswrapper[4982]: I1205 19:30:43.850935 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed"} Dec 05 19:30:44 crc kubenswrapper[4982]: I1205 19:30:44.353348 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:44 crc kubenswrapper[4982]: I1205 19:30:44.353420 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:44 crc kubenswrapper[4982]: I1205 19:30:44.380906 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:44 crc kubenswrapper[4982]: I1205 19:30:44.891248 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-2f2gp" Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.281375 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"] Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.284376 4982 util.go:30] "No sandbox for pod can be found. 
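The machine-config-daemon entries above show a complete liveness cycle: repeated "connection refused" failures against 127.0.0.1:8798, the kill with gracePeriod=600, and a new container ID started about a second later (the "RemoveContainer" line garbage-collects the previous dead instance). A rough sketch of such a probe loop; failureThreshold and periodSeconds are assumptions, since the pod spec is not part of this log:

    // Sketch of HTTP liveness probing against the endpoint seen in the log.
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        const threshold = 3 // assumed failureThreshold
        failures := 0
        for {
            ok := false
            resp, err := http.Get("http://127.0.0.1:8798/health")
            if err == nil {
                ok = resp.StatusCode >= 200 && resp.StatusCode < 400
                resp.Body.Close()
            }
            if ok {
                failures = 0
            } else {
                failures++
                fmt.Println("Probe failed; err =", err)
            }
            if failures >= threshold {
                fmt.Println("liveness threshold reached; kubelet would kill and restart the container")
                return
            }
            time.Sleep(10 * time.Second) // assumed periodSeconds
        }
    }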
Need to start a new one" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.287180 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-vs9z4"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.293052 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"]
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.326293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9kkd\" (UniqueName: \"kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.326358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.326438 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.427395 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9kkd\" (UniqueName: \"kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.427904 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.428592 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.428638 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.429058 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.452125 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9kkd\" (UniqueName: \"kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd\") pod \"a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") " pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:47 crc kubenswrapper[4982]: I1205 19:30:47.609390 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:48 crc kubenswrapper[4982]: I1205 19:30:48.057008 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"]
Dec 05 19:30:48 crc kubenswrapper[4982]: I1205 19:30:48.896983 4982 generic.go:334] "Generic (PLEG): container finished" podID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerID="d9bda43a91cb23cf731300a01dca0f7d489f2703ed8cf8d892a400f68cc0874e" exitCode=0
Dec 05 19:30:48 crc kubenswrapper[4982]: I1205 19:30:48.897107 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn" event={"ID":"d15a633b-7ce9-4676-9431-f5e40ec5a019","Type":"ContainerDied","Data":"d9bda43a91cb23cf731300a01dca0f7d489f2703ed8cf8d892a400f68cc0874e"}
Dec 05 19:30:48 crc kubenswrapper[4982]: I1205 19:30:48.897384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn" event={"ID":"d15a633b-7ce9-4676-9431-f5e40ec5a019","Type":"ContainerStarted","Data":"41cead31285fa4dce576d2d3fd73fa8729cd619ca695a3f7bea4a70b04b6d72c"}
Dec 05 19:30:48 crc kubenswrapper[4982]: I1205 19:30:48.901229 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 19:30:49 crc kubenswrapper[4982]: I1205 19:30:49.906704 4982 generic.go:334] "Generic (PLEG): container finished" podID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerID="e173ea059f62718b6349f9b66f6e3fd9a905a420a9393a6cc898794ecda96785" exitCode=0
Dec 05 19:30:49 crc kubenswrapper[4982]: I1205 19:30:49.906750 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn" event={"ID":"d15a633b-7ce9-4676-9431-f5e40ec5a019","Type":"ContainerDied","Data":"e173ea059f62718b6349f9b66f6e3fd9a905a420a9393a6cc898794ecda96785"}
Dec 05 19:30:50 crc kubenswrapper[4982]: I1205 19:30:50.921641 4982 generic.go:334] "Generic (PLEG): container finished" podID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerID="f4228b2cc13b6839f6bbcac61b40ec2c1dabb421edd4ab573e0a601d418638bb" exitCode=0
Dec 05 19:30:50 crc kubenswrapper[4982]: I1205 19:30:50.921717 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn" event={"ID":"d15a633b-7ce9-4676-9431-f5e40ec5a019","Type":"ContainerDied","Data":"f4228b2cc13b6839f6bbcac61b40ec2c1dabb421edd4ab573e0a601d418638bb"}
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.214486 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.324862 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle\") pod \"d15a633b-7ce9-4676-9431-f5e40ec5a019\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") "
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.325008 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util\") pod \"d15a633b-7ce9-4676-9431-f5e40ec5a019\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") "
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.325072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9kkd\" (UniqueName: \"kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd\") pod \"d15a633b-7ce9-4676-9431-f5e40ec5a019\" (UID: \"d15a633b-7ce9-4676-9431-f5e40ec5a019\") "
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.326548 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle" (OuterVolumeSpecName: "bundle") pod "d15a633b-7ce9-4676-9431-f5e40ec5a019" (UID: "d15a633b-7ce9-4676-9431-f5e40ec5a019"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.333287 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd" (OuterVolumeSpecName: "kube-api-access-w9kkd") pod "d15a633b-7ce9-4676-9431-f5e40ec5a019" (UID: "d15a633b-7ce9-4676-9431-f5e40ec5a019"). InnerVolumeSpecName "kube-api-access-w9kkd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.340581 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util" (OuterVolumeSpecName: "util") pod "d15a633b-7ce9-4676-9431-f5e40ec5a019" (UID: "d15a633b-7ce9-4676-9431-f5e40ec5a019"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.426884 4982 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.426924 4982 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d15a633b-7ce9-4676-9431-f5e40ec5a019-util\") on node \"crc\" DevicePath \"\""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.426937 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9kkd\" (UniqueName: \"kubernetes.io/projected/d15a633b-7ce9-4676-9431-f5e40ec5a019-kube-api-access-w9kkd\") on node \"crc\" DevicePath \"\""
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.938472 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn" event={"ID":"d15a633b-7ce9-4676-9431-f5e40ec5a019","Type":"ContainerDied","Data":"41cead31285fa4dce576d2d3fd73fa8729cd619ca695a3f7bea4a70b04b6d72c"}
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.938756 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="41cead31285fa4dce576d2d3fd73fa8729cd619ca695a3f7bea4a70b04b6d72c"
Dec 05 19:30:52 crc kubenswrapper[4982]: I1205 19:30:52.938551 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.139073 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"]
Dec 05 19:30:59 crc kubenswrapper[4982]: E1205 19:30:59.139819 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="extract"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.139836 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="extract"
Dec 05 19:30:59 crc kubenswrapper[4982]: E1205 19:30:59.139863 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="pull"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.139872 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="pull"
Dec 05 19:30:59 crc kubenswrapper[4982]: E1205 19:30:59.139885 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="util"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.139894 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="util"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.140030 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d15a633b-7ce9-4676-9431-f5e40ec5a019" containerName="extract"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.140563 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.142741 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-j8fc7"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.168142 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"]
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.228346 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68wk7\" (UniqueName: \"kubernetes.io/projected/5619a079-821c-49cb-84f5-136f41ff45a5-kube-api-access-68wk7\") pod \"openstack-operator-controller-operator-79fbdbdfdc-dt7k6\" (UID: \"5619a079-821c-49cb-84f5-136f41ff45a5\") " pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.329933 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68wk7\" (UniqueName: \"kubernetes.io/projected/5619a079-821c-49cb-84f5-136f41ff45a5-kube-api-access-68wk7\") pod \"openstack-operator-controller-operator-79fbdbdfdc-dt7k6\" (UID: \"5619a079-821c-49cb-84f5-136f41ff45a5\") " pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.377884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68wk7\" (UniqueName: \"kubernetes.io/projected/5619a079-821c-49cb-84f5-136f41ff45a5-kube-api-access-68wk7\") pod \"openstack-operator-controller-operator-79fbdbdfdc-dt7k6\" (UID: \"5619a079-821c-49cb-84f5-136f41ff45a5\") " pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.459425 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.709119 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"]
Dec 05 19:30:59 crc kubenswrapper[4982]: I1205 19:30:59.991241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6" event={"ID":"5619a079-821c-49cb-84f5-136f41ff45a5","Type":"ContainerStarted","Data":"fc2ae56f4979b4b0df4338bf2e787bab25a195bbd159a8f559cb46d0762bd7d2"}
Dec 05 19:31:04 crc kubenswrapper[4982]: I1205 19:31:04.021398 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6" event={"ID":"5619a079-821c-49cb-84f5-136f41ff45a5","Type":"ContainerStarted","Data":"8b9fb73c3fc4e43ca78f47c5e698a2c915745d398e6f648d84ddd5158e691d2d"}
Dec 05 19:31:04 crc kubenswrapper[4982]: I1205 19:31:04.021963 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:31:04 crc kubenswrapper[4982]: I1205 19:31:04.053621 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6" podStartSLOduration=1.6621881200000002 podStartE2EDuration="5.053592437s" podCreationTimestamp="2025-12-05 19:30:59 +0000 UTC" firstStartedPulling="2025-12-05 19:30:59.72931633 +0000 UTC m=+1038.611202325" lastFinishedPulling="2025-12-05 19:31:03.120720637 +0000 UTC m=+1042.002606642" observedRunningTime="2025-12-05 19:31:04.049883394 +0000 UTC m=+1042.931769429" watchObservedRunningTime="2025-12-05 19:31:04.053592437 +0000 UTC m=+1042.935478502"
Dec 05 19:31:09 crc kubenswrapper[4982]: I1205 19:31:09.461844 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-79fbdbdfdc-dt7k6"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.429555 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.431221 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.433261 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-crp52"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.439197 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.457745 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.459084 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.464132 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-4h2t6"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.466531 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.467479 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.481427 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.483858 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-tbccf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.488618 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.496488 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.500503 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.507934 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-29tt2"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.518824 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.537967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9zpt\" (UniqueName: \"kubernetes.io/projected/517bb39b-5710-45f0-b70a-694dc5b4d044-kube-api-access-f9zpt\") pod \"glance-operator-controller-manager-77987cd8cd-p6gnj\" (UID: \"517bb39b-5710-45f0-b70a-694dc5b4d044\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.538031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkzc2\" (UniqueName: \"kubernetes.io/projected/b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a-kube-api-access-fkzc2\") pod \"cinder-operator-controller-manager-859b6ccc6-wrfxk\" (UID: \"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.538110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sntk\" (UniqueName: \"kubernetes.io/projected/55a262ed-8b7c-4e28-af82-89c5df1f675b-kube-api-access-8sntk\") pod \"designate-operator-controller-manager-78b4bc895b-t2zqn\" (UID: \"55a262ed-8b7c-4e28-af82-89c5df1f675b\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.538175 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlvfb\" (UniqueName: \"kubernetes.io/projected/ebffce09-7b77-4c54-9c5a-520517cc3aa8-kube-api-access-qlvfb\") pod \"barbican-operator-controller-manager-7d9dfd778-f8852\" (UID: \"ebffce09-7b77-4c54-9c5a-520517cc3aa8\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.554202 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.555229 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.560638 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-4k26r"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.563661 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.564570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.566867 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-pf4p6"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.572203 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.584653 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.590274 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.591369 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.597757 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-z76cf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.597989 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.612538 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.638821 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.638874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sntk\" (UniqueName: \"kubernetes.io/projected/55a262ed-8b7c-4e28-af82-89c5df1f675b-kube-api-access-8sntk\") pod \"designate-operator-controller-manager-78b4bc895b-t2zqn\" (UID: \"55a262ed-8b7c-4e28-af82-89c5df1f675b\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.638919 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ctxb\" (UniqueName: \"kubernetes.io/projected/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-kube-api-access-4ctxb\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.638953 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlvfb\" (UniqueName: \"kubernetes.io/projected/ebffce09-7b77-4c54-9c5a-520517cc3aa8-kube-api-access-qlvfb\") pod \"barbican-operator-controller-manager-7d9dfd778-f8852\" (UID: \"ebffce09-7b77-4c54-9c5a-520517cc3aa8\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.638985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9zpt\" (UniqueName: \"kubernetes.io/projected/517bb39b-5710-45f0-b70a-694dc5b4d044-kube-api-access-f9zpt\") pod \"glance-operator-controller-manager-77987cd8cd-p6gnj\" (UID: \"517bb39b-5710-45f0-b70a-694dc5b4d044\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.639009 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkzc2\" (UniqueName: \"kubernetes.io/projected/b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a-kube-api-access-fkzc2\") pod \"cinder-operator-controller-manager-859b6ccc6-wrfxk\" (UID: \"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.639029 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jc5b\" (UniqueName: \"kubernetes.io/projected/511305c1-7bff-43ce-b398-e5aec02fa9ec-kube-api-access-8jc5b\") pod \"heat-operator-controller-manager-5f64f6f8bb-rvzdw\" (UID: \"511305c1-7bff-43ce-b398-e5aec02fa9ec\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.639056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96nkc\" (UniqueName: \"kubernetes.io/projected/25671f72-e601-41d8-9617-fb9c436e7959-kube-api-access-96nkc\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrjv5\" (UID: \"25671f72-e601-41d8-9617-fb9c436e7959\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.647227 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.648591 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.654651 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-tdw5r"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.662198 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.663322 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.666944 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.673509 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-6vcjm"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.678827 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.680124 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.687518 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-kqgnd"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.688708 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlvfb\" (UniqueName: \"kubernetes.io/projected/ebffce09-7b77-4c54-9c5a-520517cc3aa8-kube-api-access-qlvfb\") pod \"barbican-operator-controller-manager-7d9dfd778-f8852\" (UID: \"ebffce09-7b77-4c54-9c5a-520517cc3aa8\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.700364 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sntk\" (UniqueName: \"kubernetes.io/projected/55a262ed-8b7c-4e28-af82-89c5df1f675b-kube-api-access-8sntk\") pod \"designate-operator-controller-manager-78b4bc895b-t2zqn\" (UID: \"55a262ed-8b7c-4e28-af82-89c5df1f675b\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.706888 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9zpt\" (UniqueName: \"kubernetes.io/projected/517bb39b-5710-45f0-b70a-694dc5b4d044-kube-api-access-f9zpt\") pod \"glance-operator-controller-manager-77987cd8cd-p6gnj\" (UID: \"517bb39b-5710-45f0-b70a-694dc5b4d044\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.706964 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.708307 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.711909 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-589pd"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.717451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkzc2\" (UniqueName: \"kubernetes.io/projected/b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a-kube-api-access-fkzc2\") pod \"cinder-operator-controller-manager-859b6ccc6-wrfxk\" (UID: \"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.719210 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.720237 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.726473 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-z7vq2"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.738716 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740698 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzxmc\" (UniqueName: \"kubernetes.io/projected/d30f627a-9e43-4435-aaf3-31a0631bfcba-kube-api-access-dzxmc\") pod \"manila-operator-controller-manager-7c79b5df47-ctg6p\" (UID: \"d30f627a-9e43-4435-aaf3-31a0631bfcba\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740741 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g759j\" (UniqueName: \"kubernetes.io/projected/d4adcb8f-8951-4c59-adf3-e94b1a5e202b-kube-api-access-g759j\") pod \"mariadb-operator-controller-manager-56bbcc9d85-472kw\" (UID: \"d4adcb8f-8951-4c59-adf3-e94b1a5e202b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8ph5\" (UniqueName: \"kubernetes.io/projected/701bf927-7bac-49a4-9435-a68ebd3ff8c4-kube-api-access-d8ph5\") pod \"keystone-operator-controller-manager-7765d96ddf-w8qcr\" (UID: \"701bf927-7bac-49a4-9435-a68ebd3ff8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740801 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ctxb\" (UniqueName: \"kubernetes.io/projected/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-kube-api-access-4ctxb\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740841 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fq5r\" (UniqueName: \"kubernetes.io/projected/5cd3d875-b57b-4b61-ac66-17035d351f35-kube-api-access-8fq5r\") pod \"ironic-operator-controller-manager-6c548fd776-ms8zf\" (UID: \"5cd3d875-b57b-4b61-ac66-17035d351f35\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jc5b\" (UniqueName: \"kubernetes.io/projected/511305c1-7bff-43ce-b398-e5aec02fa9ec-kube-api-access-8jc5b\") pod \"heat-operator-controller-manager-5f64f6f8bb-rvzdw\" (UID: \"511305c1-7bff-43ce-b398-e5aec02fa9ec\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740902 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96nkc\" (UniqueName: \"kubernetes.io/projected/25671f72-e601-41d8-9617-fb9c436e7959-kube-api-access-96nkc\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrjv5\" (UID: \"25671f72-e601-41d8-9617-fb9c436e7959\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740928 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.740946 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jhk87\" (UniqueName: \"kubernetes.io/projected/b4d06e27-b91f-4602-b327-3435d8977280-kube-api-access-jhk87\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7rqsz\" (UID: \"b4d06e27-b91f-4602-b327-3435d8977280\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"
Dec 05 19:31:28 crc kubenswrapper[4982]: E1205 19:31:28.741641 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 19:31:28 crc kubenswrapper[4982]: E1205 19:31:28.741703 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert podName:ab5cdcbf-c82f-48be-a97d-65a856e95bd9 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:29.241681988 +0000 UTC m=+1068.123567983 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert") pod "infra-operator-controller-manager-57548d458d-fnkqx" (UID: "ab5cdcbf-c82f-48be-a97d-65a856e95bd9") : secret "infra-operator-webhook-server-cert" not found
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.750646 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.754697 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.761817 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.770040 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.771356 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.826356 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.837287 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.838044 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-g8qsp"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.838387 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.843012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fq5r\" (UniqueName: \"kubernetes.io/projected/5cd3d875-b57b-4b61-ac66-17035d351f35-kube-api-access-8fq5r\") pod \"ironic-operator-controller-manager-6c548fd776-ms8zf\" (UID: \"5cd3d875-b57b-4b61-ac66-17035d351f35\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.843327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jhk87\" (UniqueName: \"kubernetes.io/projected/b4d06e27-b91f-4602-b327-3435d8977280-kube-api-access-jhk87\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7rqsz\" (UID: \"b4d06e27-b91f-4602-b327-3435d8977280\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.843366 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzxmc\" (UniqueName: \"kubernetes.io/projected/d30f627a-9e43-4435-aaf3-31a0631bfcba-kube-api-access-dzxmc\") pod \"manila-operator-controller-manager-7c79b5df47-ctg6p\" (UID: \"d30f627a-9e43-4435-aaf3-31a0631bfcba\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.843399 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g759j\" (UniqueName: \"kubernetes.io/projected/d4adcb8f-8951-4c59-adf3-e94b1a5e202b-kube-api-access-g759j\") pod \"mariadb-operator-controller-manager-56bbcc9d85-472kw\" (UID: \"d4adcb8f-8951-4c59-adf3-e94b1a5e202b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.843447 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8ph5\" (UniqueName: \"kubernetes.io/projected/701bf927-7bac-49a4-9435-a68ebd3ff8c4-kube-api-access-d8ph5\") pod \"keystone-operator-controller-manager-7765d96ddf-w8qcr\" (UID: \"701bf927-7bac-49a4-9435-a68ebd3ff8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.846357 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96nkc\" (UniqueName: \"kubernetes.io/projected/25671f72-e601-41d8-9617-fb9c436e7959-kube-api-access-96nkc\") pod \"horizon-operator-controller-manager-68c6d99b8f-rrjv5\" (UID: \"25671f72-e601-41d8-9617-fb9c436e7959\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.847769 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.849069 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ctxb\" (UniqueName: \"kubernetes.io/projected/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-kube-api-access-4ctxb\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.853584 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jc5b\" (UniqueName: \"kubernetes.io/projected/511305c1-7bff-43ce-b398-e5aec02fa9ec-kube-api-access-8jc5b\") pod \"heat-operator-controller-manager-5f64f6f8bb-rvzdw\" (UID: \"511305c1-7bff-43ce-b398-e5aec02fa9ec\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.880402 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.885200 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jhk87\" (UniqueName: \"kubernetes.io/projected/b4d06e27-b91f-4602-b327-3435d8977280-kube-api-access-jhk87\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-7rqsz\" (UID: \"b4d06e27-b91f-4602-b327-3435d8977280\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.886069 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.889589 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.890355 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8ph5\" (UniqueName: \"kubernetes.io/projected/701bf927-7bac-49a4-9435-a68ebd3ff8c4-kube-api-access-d8ph5\") pod \"keystone-operator-controller-manager-7765d96ddf-w8qcr\" (UID: \"701bf927-7bac-49a4-9435-a68ebd3ff8c4\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.893718 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.895005 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.895758 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzxmc\" (UniqueName: \"kubernetes.io/projected/d30f627a-9e43-4435-aaf3-31a0631bfcba-kube-api-access-dzxmc\") pod \"manila-operator-controller-manager-7c79b5df47-ctg6p\" (UID: \"d30f627a-9e43-4435-aaf3-31a0631bfcba\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.896073 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fq5r\" (UniqueName: \"kubernetes.io/projected/5cd3d875-b57b-4b61-ac66-17035d351f35-kube-api-access-8fq5r\") pod \"ironic-operator-controller-manager-6c548fd776-ms8zf\" (UID: \"5cd3d875-b57b-4b61-ac66-17035d351f35\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.898104 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-4qhxx"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.909198 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g759j\" (UniqueName: \"kubernetes.io/projected/d4adcb8f-8951-4c59-adf3-e94b1a5e202b-kube-api-access-g759j\") pod \"mariadb-operator-controller-manager-56bbcc9d85-472kw\" (UID: \"d4adcb8f-8951-4c59-adf3-e94b1a5e202b\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.912854 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.914014 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.916337 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.917027 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-bhdth"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.922240 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.923289 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.926780 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-ftvzz"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.944499 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8rft\" (UniqueName: \"kubernetes.io/projected/2e0bfce4-bfd5-49ae-858f-647f5f8a919e-kube-api-access-h8rft\") pod \"nova-operator-controller-manager-697bc559fc-7hbtv\" (UID: \"2e0bfce4-bfd5-49ae-858f-647f5f8a919e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.948974 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.953881 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.954958 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.959503 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-ss5r5"
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.974669 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"]
Dec 05 19:31:28 crc kubenswrapper[4982]: I1205 19:31:28.989213 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.005908 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.008328 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.015363 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rvz8f"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.039047 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.043237 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.045305 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.047812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.047882 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzz89\" (UniqueName: \"kubernetes.io/projected/c286a8e0-15ff-4705-a03f-bca226144360-kube-api-access-hzz89\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.051115 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cjtn\" (UniqueName: \"kubernetes.io/projected/a964acf3-fa80-4561-86da-c831a10fc58e-kube-api-access-2cjtn\") pod \"ovn-operator-controller-manager-b6456fdb6-xww76\" (UID: \"a964acf3-fa80-4561-86da-c831a10fc58e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.051211 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8rft\" (UniqueName: \"kubernetes.io/projected/2e0bfce4-bfd5-49ae-858f-647f5f8a919e-kube-api-access-h8rft\") pod \"nova-operator-controller-manager-697bc559fc-7hbtv\" (UID: \"2e0bfce4-bfd5-49ae-858f-647f5f8a919e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.051287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq2xm\" (UniqueName: \"kubernetes.io/projected/983b81df-c036-4f75-8d49-259f09235991-kube-api-access-fq2xm\") pod \"octavia-operator-controller-manager-998648c74-d6svn\" (UID: \"983b81df-c036-4f75-8d49-259f09235991\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.051539 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-mnhjg"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.059429 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.059799 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.082749 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.086582 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8rft\" (UniqueName: \"kubernetes.io/projected/2e0bfce4-bfd5-49ae-858f-647f5f8a919e-kube-api-access-h8rft\") pod \"nova-operator-controller-manager-697bc559fc-7hbtv\" (UID: \"2e0bfce4-bfd5-49ae-858f-647f5f8a919e\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.129105 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.148672 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.153140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq2xm\" (UniqueName: \"kubernetes.io/projected/983b81df-c036-4f75-8d49-259f09235991-kube-api-access-fq2xm\") pod \"octavia-operator-controller-manager-998648c74-d6svn\" (UID: \"983b81df-c036-4f75-8d49-259f09235991\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169410 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzz89\" (UniqueName: \"kubernetes.io/projected/c286a8e0-15ff-4705-a03f-bca226144360-kube-api-access-hzz89\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169498 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knsvv\" (UniqueName: \"kubernetes.io/projected/c959458c-8a9a-4d37-889a-577a673e5305-kube-api-access-knsvv\") pod \"placement-operator-controller-manager-78f8948974-kzpx6\" (UID: \"c959458c-8a9a-4d37-889a-577a673e5305\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"
Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.169518 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.169627 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert podName:c286a8e0-15ff-4705-a03f-bca226144360 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:29.669602128 +0000 UTC m=+1068.551488193 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" (UID: "c286a8e0-15ff-4705-a03f-bca226144360") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cjtn\" (UniqueName: \"kubernetes.io/projected/a964acf3-fa80-4561-86da-c831a10fc58e-kube-api-access-2cjtn\") pod \"ovn-operator-controller-manager-b6456fdb6-xww76\" (UID: \"a964acf3-fa80-4561-86da-c831a10fc58e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169382 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169841 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scnqx\" (UniqueName: \"kubernetes.io/projected/7b1c4531-0231-42d4-94e9-0a211394dfa6-kube-api-access-scnqx\") pod \"telemetry-operator-controller-manager-6b4849bfff-skwf7\" (UID: \"7b1c4531-0231-42d4-94e9-0a211394dfa6\") " pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.169873 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n7cfv\" (UniqueName: \"kubernetes.io/projected/167e755e-d998-47ca-88dd-0bc17c975864-kube-api-access-n7cfv\") pod \"swift-operator-controller-manager-5f8c65bbfc-8ttpr\" (UID: \"167e755e-d998-47ca-88dd-0bc17c975864\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.171824 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.175818 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.178639 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-m8v6t"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.191791 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.193095 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.194523 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq2xm\" (UniqueName: \"kubernetes.io/projected/983b81df-c036-4f75-8d49-259f09235991-kube-api-access-fq2xm\") pod \"octavia-operator-controller-manager-998648c74-d6svn\" (UID: \"983b81df-c036-4f75-8d49-259f09235991\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.198450 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cjtn\" (UniqueName: \"kubernetes.io/projected/a964acf3-fa80-4561-86da-c831a10fc58e-kube-api-access-2cjtn\") pod \"ovn-operator-controller-manager-b6456fdb6-xww76\" (UID: \"a964acf3-fa80-4561-86da-c831a10fc58e\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.207101 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzz89\" (UniqueName: \"kubernetes.io/projected/c286a8e0-15ff-4705-a03f-bca226144360-kube-api-access-hzz89\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.209216 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.235547 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.259440 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.271799 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.271841 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knsvv\" (UniqueName: \"kubernetes.io/projected/c959458c-8a9a-4d37-889a-577a673e5305-kube-api-access-knsvv\") pod \"placement-operator-controller-manager-78f8948974-kzpx6\" (UID: \"c959458c-8a9a-4d37-889a-577a673e5305\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.271876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n7cfv\" (UniqueName: \"kubernetes.io/projected/167e755e-d998-47ca-88dd-0bc17c975864-kube-api-access-n7cfv\") pod \"swift-operator-controller-manager-5f8c65bbfc-8ttpr\" (UID: \"167e755e-d998-47ca-88dd-0bc17c975864\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.271893 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scnqx\" (UniqueName: \"kubernetes.io/projected/7b1c4531-0231-42d4-94e9-0a211394dfa6-kube-api-access-scnqx\") pod \"telemetry-operator-controller-manager-6b4849bfff-skwf7\" (UID: \"7b1c4531-0231-42d4-94e9-0a211394dfa6\") " pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.271921 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlv95\" (UniqueName: \"kubernetes.io/projected/26fa2fe6-70bb-4a70-8bee-b0cde872beb1-kube-api-access-wlv95\") pod \"test-operator-controller-manager-5854674fcc-d48dh\" (UID: \"26fa2fe6-70bb-4a70-8bee-b0cde872beb1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"
Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.272130 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.272205 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert podName:ab5cdcbf-c82f-48be-a97d-65a856e95bd9 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:30.272187358 +0000 UTC m=+1069.154073353 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert") pod "infra-operator-controller-manager-57548d458d-fnkqx" (UID: "ab5cdcbf-c82f-48be-a97d-65a856e95bd9") : secret "infra-operator-webhook-server-cert" not found
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.298411 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n7cfv\" (UniqueName: \"kubernetes.io/projected/167e755e-d998-47ca-88dd-0bc17c975864-kube-api-access-n7cfv\") pod \"swift-operator-controller-manager-5f8c65bbfc-8ttpr\" (UID: \"167e755e-d998-47ca-88dd-0bc17c975864\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.300410 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knsvv\" (UniqueName: \"kubernetes.io/projected/c959458c-8a9a-4d37-889a-577a673e5305-kube-api-access-knsvv\") pod \"placement-operator-controller-manager-78f8948974-kzpx6\" (UID: \"c959458c-8a9a-4d37-889a-577a673e5305\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.300560 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scnqx\" (UniqueName: \"kubernetes.io/projected/7b1c4531-0231-42d4-94e9-0a211394dfa6-kube-api-access-scnqx\") pod \"telemetry-operator-controller-manager-6b4849bfff-skwf7\" (UID: \"7b1c4531-0231-42d4-94e9-0a211394dfa6\") " pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.347441 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.354990 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.356650 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.361715 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-p895m"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.372766 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlv95\" (UniqueName: \"kubernetes.io/projected/26fa2fe6-70bb-4a70-8bee-b0cde872beb1-kube-api-access-wlv95\") pod \"test-operator-controller-manager-5854674fcc-d48dh\" (UID: \"26fa2fe6-70bb-4a70-8bee-b0cde872beb1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.387303 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq"]
Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.397746 4982 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.413135 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlv95\" (UniqueName: \"kubernetes.io/projected/26fa2fe6-70bb-4a70-8bee-b0cde872beb1-kube-api-access-wlv95\") pod \"test-operator-controller-manager-5854674fcc-d48dh\" (UID: \"26fa2fe6-70bb-4a70-8bee-b0cde872beb1\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.420235 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.421568 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.424543 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.425475 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.425852 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-k7m9j" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.441203 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.476793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hflm8\" (UniqueName: \"kubernetes.io/projected/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-kube-api-access-hflm8\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.477132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.477174 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2f2q\" (UniqueName: \"kubernetes.io/projected/cf51df5f-9adb-4929-9a00-6bfeafdfa069-kube-api-access-q2f2q\") pod \"watcher-operator-controller-manager-769dc69bc-829rq\" (UID: \"cf51df5f-9adb-4929-9a00-6bfeafdfa069\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.477231 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " 
pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.508695 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.563197 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.564186 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.567443 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-l9hsb" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.568416 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.574753 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.576386 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.579604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.579641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2f2q\" (UniqueName: \"kubernetes.io/projected/cf51df5f-9adb-4929-9a00-6bfeafdfa069-kube-api-access-q2f2q\") pod \"watcher-operator-controller-manager-769dc69bc-829rq\" (UID: \"cf51df5f-9adb-4929-9a00-6bfeafdfa069\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.579671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.579706 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hflm8\" (UniqueName: \"kubernetes.io/projected/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-kube-api-access-hflm8\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.579738 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzw7l\" (UniqueName: 
\"kubernetes.io/projected/1f84d21b-6ce8-4c97-a104-cb308ce8527d-kube-api-access-dzw7l\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wst8r\" (UID: \"1f84d21b-6ce8-4c97-a104-cb308ce8527d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.579895 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.579934 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:30.079919817 +0000 UTC m=+1068.961805812 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "webhook-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.580276 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.580313 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:30.080305587 +0000 UTC m=+1068.962191582 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "metrics-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.635603 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hflm8\" (UniqueName: \"kubernetes.io/projected/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-kube-api-access-hflm8\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.641597 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2f2q\" (UniqueName: \"kubernetes.io/projected/cf51df5f-9adb-4929-9a00-6bfeafdfa069-kube-api-access-q2f2q\") pod \"watcher-operator-controller-manager-769dc69bc-829rq\" (UID: \"cf51df5f-9adb-4929-9a00-6bfeafdfa069\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.681820 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.683472 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzw7l\" (UniqueName: \"kubernetes.io/projected/1f84d21b-6ce8-4c97-a104-cb308ce8527d-kube-api-access-dzw7l\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wst8r\" (UID: \"1f84d21b-6ce8-4c97-a104-cb308ce8527d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" Dec 05 
19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.683514 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.683671 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: E1205 19:31:29.683727 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert podName:c286a8e0-15ff-4705-a03f-bca226144360 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:30.683712997 +0000 UTC m=+1069.565598992 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" (UID: "c286a8e0-15ff-4705-a03f-bca226144360") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.698355 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw"] Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.699911 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.718039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzw7l\" (UniqueName: \"kubernetes.io/projected/1f84d21b-6ce8-4c97-a104-cb308ce8527d-kube-api-access-dzw7l\") pod \"rabbitmq-cluster-operator-manager-668c99d594-wst8r\" (UID: \"1f84d21b-6ce8-4c97-a104-cb308ce8527d\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" Dec 05 19:31:29 crc kubenswrapper[4982]: I1205 19:31:29.896504 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.095745 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.096507 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.096556 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.096689 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.096744 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:31.096728914 +0000 UTC m=+1069.978614909 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "webhook-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.096689 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.096942 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:31.096911079 +0000 UTC m=+1069.978797144 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "metrics-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.128865 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn"] Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.129665 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25671f72_e601_41d8_9617_fb9c436e7959.slice/crio-037bc2749b42287dd94d589ed6332eed6f52780a9ea77b160e92884de796184c WatchSource:0}: Error finding container 037bc2749b42287dd94d589ed6332eed6f52780a9ea77b160e92884de796184c: Status 404 returned error can't find the container with id 037bc2749b42287dd94d589ed6332eed6f52780a9ea77b160e92884de796184c Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.131108 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cd3d875_b57b_4b61_ac66_17035d351f35.slice/crio-6f435c1d3efb487daa1332cd80dfe4780c3843b82d272bd11802a91647dca5b1 WatchSource:0}: Error finding container 6f435c1d3efb487daa1332cd80dfe4780c3843b82d272bd11802a91647dca5b1: Status 404 returned error can't find the container with id 6f435c1d3efb487daa1332cd80dfe4780c3843b82d272bd11802a91647dca5b1 Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.133903 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod517bb39b_5710_45f0_b70a_694dc5b4d044.slice/crio-45acef1f571ed941579185fc8e91ae20f7c0c1defe44f813798d33c10a858261 WatchSource:0}: Error finding container 45acef1f571ed941579185fc8e91ae20f7c0c1defe44f813798d33c10a858261: Status 404 returned error can't find the container with id 45acef1f571ed941579185fc8e91ae20f7c0c1defe44f813798d33c10a858261 Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.137691 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf"] Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.139574 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55a262ed_8b7c_4e28_af82_89c5df1f675b.slice/crio-de47005a6c5edfa29092d371a8be3e5e4feb79c56db42ec0c58e885ae09e8df2 WatchSource:0}: Error finding container de47005a6c5edfa29092d371a8be3e5e4feb79c56db42ec0c58e885ae09e8df2: Status 404 returned error can't find the container with id de47005a6c5edfa29092d371a8be3e5e4feb79c56db42ec0c58e885ae09e8df2 Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.143608 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.148759 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.201526 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.228015 4982 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" event={"ID":"ebffce09-7b77-4c54-9c5a-520517cc3aa8","Type":"ContainerStarted","Data":"708f28158e731e0393bf1ed468b20b1bea464a8228f4367939f2e8090764ab2d"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.236125 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" event={"ID":"517bb39b-5710-45f0-b70a-694dc5b4d044","Type":"ContainerStarted","Data":"45acef1f571ed941579185fc8e91ae20f7c0c1defe44f813798d33c10a858261"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.245360 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" event={"ID":"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a","Type":"ContainerStarted","Data":"87ef99012acd7ba90d819fbd861775bb2716b88961db119a9c5f4d765d94e5d8"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.247996 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" event={"ID":"25671f72-e601-41d8-9617-fb9c436e7959","Type":"ContainerStarted","Data":"037bc2749b42287dd94d589ed6332eed6f52780a9ea77b160e92884de796184c"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.249997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" event={"ID":"511305c1-7bff-43ce-b398-e5aec02fa9ec","Type":"ContainerStarted","Data":"56e6664c0738cecabf3262a405ca1db6063e9ffd75f94f3c0ff73e31865ca889"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.254013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" event={"ID":"d30f627a-9e43-4435-aaf3-31a0631bfcba","Type":"ContainerStarted","Data":"42d3bfa047b80d000d9e7c1ed35516730a5ef39f9d4caf7d2cb6b5ce2314423b"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.255582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" event={"ID":"5cd3d875-b57b-4b61-ac66-17035d351f35","Type":"ContainerStarted","Data":"6f435c1d3efb487daa1332cd80dfe4780c3843b82d272bd11802a91647dca5b1"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.257432 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" event={"ID":"55a262ed-8b7c-4e28-af82-89c5df1f675b","Type":"ContainerStarted","Data":"de47005a6c5edfa29092d371a8be3e5e4feb79c56db42ec0c58e885ae09e8df2"} Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.301085 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.301238 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.301312 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert podName:ab5cdcbf-c82f-48be-a97d-65a856e95bd9 nodeName:}" 
failed. No retries permitted until 2025-12-05 19:31:32.301293928 +0000 UTC m=+1071.183179923 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert") pod "infra-operator-controller-manager-57548d458d-fnkqx" (UID: "ab5cdcbf-c82f-48be-a97d-65a856e95bd9") : secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.479812 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-d6svn"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.491703 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.497828 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz"] Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.500945 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda964acf3_fa80_4561_86da_c831a10fc58e.slice/crio-08b160e146eb1db73e7509096799f086c783f0c298499621aea3475e6026551c WatchSource:0}: Error finding container 08b160e146eb1db73e7509096799f086c783f0c298499621aea3475e6026551c: Status 404 returned error can't find the container with id 08b160e146eb1db73e7509096799f086c783f0c298499621aea3475e6026551c Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.512699 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod983b81df_c036_4f75_8d49_259f09235991.slice/crio-fc4651da47e36b61d3e9cc34a1909d444b81e47a31ab23c01ebd7cf6a7d68676 WatchSource:0}: Error finding container fc4651da47e36b61d3e9cc34a1909d444b81e47a31ab23c01ebd7cf6a7d68676: Status 404 returned error can't find the container with id fc4651da47e36b61d3e9cc34a1909d444b81e47a31ab23c01ebd7cf6a7d68676 Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.515677 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26fa2fe6_70bb_4a70_8bee_b0cde872beb1.slice/crio-8d5d327940efd4a565615b67fcceb04e366deddc46ecb357de43540aa86c18ad WatchSource:0}: Error finding container 8d5d327940efd4a565615b67fcceb04e366deddc46ecb357de43540aa86c18ad: Status 404 returned error can't find the container with id 8d5d327940efd4a565615b67fcceb04e366deddc46ecb357de43540aa86c18ad Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.528488 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.544963 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-d48dh"] Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.545430 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g759j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-472kw_openstack-operators(d4adcb8f-8951-4c59-adf3-e94b1a5e202b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.543074 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wlv95,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d48dh_openstack-operators(26fa2fe6-70bb-4a70-8bee-b0cde872beb1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.554542 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wlv95,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-d48dh_openstack-operators(26fa2fe6-70bb-4a70-8bee-b0cde872beb1): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.554629 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m 
DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g759j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-472kw_openstack-operators(d4adcb8f-8951-4c59-adf3-e94b1a5e202b): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.555953 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" podUID="d4adcb8f-8951-4c59-adf3-e94b1a5e202b" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.556019 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" podUID="26fa2fe6-70bb-4a70-8bee-b0cde872beb1" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.557655 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h8rft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-7hbtv_openstack-operators(2e0bfce4-bfd5-49ae-858f-647f5f8a919e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.559962 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr"] Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.564640 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h8rft,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-7hbtv_openstack-operators(2e0bfce4-bfd5-49ae-858f-647f5f8a919e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.566736 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" 
pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" podUID="2e0bfce4-bfd5-49ae-858f-647f5f8a919e" Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.574818 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.591622 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.599638 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6"] Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.621420 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf51df5f_9adb_4929_9a00_6bfeafdfa069.slice/crio-e47bfda9200053f1aeacd910d9dfd256ada7e0527b8c23a64e6957a5ff8454bb WatchSource:0}: Error finding container e47bfda9200053f1aeacd910d9dfd256ada7e0527b8c23a64e6957a5ff8454bb: Status 404 returned error can't find the container with id e47bfda9200053f1aeacd910d9dfd256ada7e0527b8c23a64e6957a5ff8454bb Dec 05 19:31:30 crc kubenswrapper[4982]: W1205 19:31:30.624065 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1f84d21b_6ce8_4c97_a104_cb308ce8527d.slice/crio-f6f377e8185f336ebabb3df3cbc7f984576c2cb19d1f0b302ce62e9a154dd664 WatchSource:0}: Error finding container f6f377e8185f336ebabb3df3cbc7f984576c2cb19d1f0b302ce62e9a154dd664: Status 404 returned error can't find the container with id f6f377e8185f336ebabb3df3cbc7f984576c2cb19d1f0b302ce62e9a154dd664 Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.627640 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q2f2q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-829rq_openstack-operators(cf51df5f-9adb-4929-9a00-6bfeafdfa069): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.629020 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dzw7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-wst8r_openstack-operators(1f84d21b-6ce8-4c97-a104-cb308ce8527d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.629591 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q2f2q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-829rq_openstack-operators(cf51df5f-9adb-4929-9a00-6bfeafdfa069): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.630446 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podUID="1f84d21b-6ce8-4c97-a104-cb308ce8527d" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.630841 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" podUID="cf51df5f-9adb-4929-9a00-6bfeafdfa069" Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.632637 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.641494 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r"] Dec 05 19:31:30 crc kubenswrapper[4982]: I1205 19:31:30.709104 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.709555 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:30 crc kubenswrapper[4982]: E1205 19:31:30.709676 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert podName:c286a8e0-15ff-4705-a03f-bca226144360 nodeName:}" failed. 
No retries permitted until 2025-12-05 19:31:32.709598007 +0000 UTC m=+1071.591484002 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" (UID: "c286a8e0-15ff-4705-a03f-bca226144360") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.118525 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.118696 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.118872 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.118977 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:33.118943601 +0000 UTC m=+1072.000829596 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "metrics-server-cert" not found Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.119141 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.119440 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:33.119414293 +0000 UTC m=+1072.001300288 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "webhook-server-cert" not found Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.289965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" event={"ID":"26fa2fe6-70bb-4a70-8bee-b0cde872beb1","Type":"ContainerStarted","Data":"8d5d327940efd4a565615b67fcceb04e366deddc46ecb357de43540aa86c18ad"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.292510 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" event={"ID":"c959458c-8a9a-4d37-889a-577a673e5305","Type":"ContainerStarted","Data":"14b78352b113d1f33227327b302df44361646eea03b6b08062819d352d4037d1"} Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.293285 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" podUID="26fa2fe6-70bb-4a70-8bee-b0cde872beb1" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.293665 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" event={"ID":"7b1c4531-0231-42d4-94e9-0a211394dfa6","Type":"ContainerStarted","Data":"b70ab9c7053c7470ca0040c7a2504c2be63a1db12b4d7a4b28ca4175169cda60"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.295029 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" event={"ID":"a964acf3-fa80-4561-86da-c831a10fc58e","Type":"ContainerStarted","Data":"08b160e146eb1db73e7509096799f086c783f0c298499621aea3475e6026551c"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.297643 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" event={"ID":"167e755e-d998-47ca-88dd-0bc17c975864","Type":"ContainerStarted","Data":"7835a430cd8a0d3503378a5c47b4c2880b1d06fb3457ec820c67ddd6a2dd1d43"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.298989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" event={"ID":"1f84d21b-6ce8-4c97-a104-cb308ce8527d","Type":"ContainerStarted","Data":"f6f377e8185f336ebabb3df3cbc7f984576c2cb19d1f0b302ce62e9a154dd664"} Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.300615 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podUID="1f84d21b-6ce8-4c97-a104-cb308ce8527d" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 
19:31:31.303591 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" event={"ID":"983b81df-c036-4f75-8d49-259f09235991","Type":"ContainerStarted","Data":"fc4651da47e36b61d3e9cc34a1909d444b81e47a31ab23c01ebd7cf6a7d68676"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.305348 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" event={"ID":"2e0bfce4-bfd5-49ae-858f-647f5f8a919e","Type":"ContainerStarted","Data":"de97bfdae6757679329eaab65799ed11eaf7ef9a9d79e7412ce0f2824e4ea917"} Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.309490 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" podUID="2e0bfce4-bfd5-49ae-858f-647f5f8a919e" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.310258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" event={"ID":"cf51df5f-9adb-4929-9a00-6bfeafdfa069","Type":"ContainerStarted","Data":"e47bfda9200053f1aeacd910d9dfd256ada7e0527b8c23a64e6957a5ff8454bb"} Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.313887 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" podUID="cf51df5f-9adb-4929-9a00-6bfeafdfa069" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.314285 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" event={"ID":"701bf927-7bac-49a4-9435-a68ebd3ff8c4","Type":"ContainerStarted","Data":"47158efdb690cabe19230e090f6d6a9f7d1e11f83c38b4dbbaa31a212384c561"} Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.315682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" event={"ID":"d4adcb8f-8951-4c59-adf3-e94b1a5e202b","Type":"ContainerStarted","Data":"f7156842d635a8e1c09c7737492dc3cb01f8ab4d4f69cafe411389a1108d7e28"} Dec 05 19:31:31 crc kubenswrapper[4982]: E1205 19:31:31.319132 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" 
pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" podUID="d4adcb8f-8951-4c59-adf3-e94b1a5e202b" Dec 05 19:31:31 crc kubenswrapper[4982]: I1205 19:31:31.324947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" event={"ID":"b4d06e27-b91f-4602-b327-3435d8977280","Type":"ContainerStarted","Data":"0571028a3893093d9d372cdad37d3b0759721fb6a0b3c366d5db5df081fe17ff"} Dec 05 19:31:32 crc kubenswrapper[4982]: I1205 19:31:32.343424 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.343525 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.344862 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert podName:ab5cdcbf-c82f-48be-a97d-65a856e95bd9 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:36.344843171 +0000 UTC m=+1075.226729166 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert") pod "infra-operator-controller-manager-57548d458d-fnkqx" (UID: "ab5cdcbf-c82f-48be-a97d-65a856e95bd9") : secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.352605 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podUID="1f84d21b-6ce8-4c97-a104-cb308ce8527d" Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.354207 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" podUID="2e0bfce4-bfd5-49ae-858f-647f5f8a919e" Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.354274 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" podUID="cf51df5f-9adb-4929-9a00-6bfeafdfa069" Dec 05 19:31:32 crc 
kubenswrapper[4982]: E1205 19:31:32.354324 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" podUID="d4adcb8f-8951-4c59-adf3-e94b1a5e202b" Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.354372 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" podUID="26fa2fe6-70bb-4a70-8bee-b0cde872beb1" Dec 05 19:31:32 crc kubenswrapper[4982]: I1205 19:31:32.750747 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.750945 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:32 crc kubenswrapper[4982]: E1205 19:31:32.750991 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert podName:c286a8e0-15ff-4705-a03f-bca226144360 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:36.750976513 +0000 UTC m=+1075.632862508 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" (UID: "c286a8e0-15ff-4705-a03f-bca226144360") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:33 crc kubenswrapper[4982]: I1205 19:31:33.158779 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:33 crc kubenswrapper[4982]: I1205 19:31:33.158844 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:33 crc kubenswrapper[4982]: E1205 19:31:33.158994 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 19:31:33 crc kubenswrapper[4982]: E1205 19:31:33.159036 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 19:31:33 crc kubenswrapper[4982]: E1205 19:31:33.159044 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:37.159029025 +0000 UTC m=+1076.040915020 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "metrics-server-cert" not found Dec 05 19:31:33 crc kubenswrapper[4982]: E1205 19:31:33.159142 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:37.159083197 +0000 UTC m=+1076.040969192 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "webhook-server-cert" not found Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.429380 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.433030 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.438339 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.578181 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bg7tl\" (UniqueName: \"kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.578592 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.578634 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.680423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bg7tl\" (UniqueName: \"kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.680513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.680544 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.681093 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.681200 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.702025 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bg7tl\" (UniqueName: \"kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl\") pod \"redhat-marketplace-ppf2q\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:34 crc kubenswrapper[4982]: I1205 19:31:34.751046 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:31:36 crc kubenswrapper[4982]: I1205 19:31:36.406550 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:36 crc kubenswrapper[4982]: E1205 19:31:36.406814 4982 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:36 crc kubenswrapper[4982]: E1205 19:31:36.407048 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert podName:ab5cdcbf-c82f-48be-a97d-65a856e95bd9 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:44.407029346 +0000 UTC m=+1083.288915341 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert") pod "infra-operator-controller-manager-57548d458d-fnkqx" (UID: "ab5cdcbf-c82f-48be-a97d-65a856e95bd9") : secret "infra-operator-webhook-server-cert" not found Dec 05 19:31:36 crc kubenswrapper[4982]: I1205 19:31:36.824533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:36 crc kubenswrapper[4982]: E1205 19:31:36.824705 4982 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:36 crc kubenswrapper[4982]: E1205 19:31:36.824818 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert podName:c286a8e0-15ff-4705-a03f-bca226144360 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:44.824783052 +0000 UTC m=+1083.706669117 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" (UID: "c286a8e0-15ff-4705-a03f-bca226144360") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 19:31:37 crc kubenswrapper[4982]: I1205 19:31:37.231912 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:37 crc kubenswrapper[4982]: I1205 19:31:37.231969 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:37 crc kubenswrapper[4982]: E1205 19:31:37.232081 4982 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 19:31:37 crc kubenswrapper[4982]: E1205 19:31:37.232109 4982 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 19:31:37 crc kubenswrapper[4982]: E1205 19:31:37.232138 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:45.232124586 +0000 UTC m=+1084.114010581 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "metrics-server-cert" not found Dec 05 19:31:37 crc kubenswrapper[4982]: E1205 19:31:37.232196 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs podName:eb889ad3-88cd-45b4-9b56-13d3181ba3e6 nodeName:}" failed. No retries permitted until 2025-12-05 19:31:45.232176648 +0000 UTC m=+1084.114062703 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs") pod "openstack-operator-controller-manager-54bf4fb767-47tg5" (UID: "eb889ad3-88cd-45b4-9b56-13d3181ba3e6") : secret "webhook-server-cert" not found Dec 05 19:31:43 crc kubenswrapper[4982]: E1205 19:31:43.088886 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 05 19:31:43 crc kubenswrapper[4982]: E1205 19:31:43.089326 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-96nkc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-rrjv5_openstack-operators(25671f72-e601-41d8-9617-fb9c436e7959): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:31:43 crc kubenswrapper[4982]: E1205 19:31:43.646192 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 05 19:31:43 crc 
kubenswrapper[4982]: E1205 19:31:43.646370 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8fq5r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-ms8zf_openstack-operators(5cd3d875-b57b-4b61-ac66-17035d351f35): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.474334 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.481030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ab5cdcbf-c82f-48be-a97d-65a856e95bd9-cert\") pod \"infra-operator-controller-manager-57548d458d-fnkqx\" (UID: \"ab5cdcbf-c82f-48be-a97d-65a856e95bd9\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.515063 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-z76cf" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.524385 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.880628 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:44 crc kubenswrapper[4982]: I1205 19:31:44.885002 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c286a8e0-15ff-4705-a03f-bca226144360-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl\" (UID: \"c286a8e0-15ff-4705-a03f-bca226144360\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.145976 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-bhdth" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.155330 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.286821 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.287067 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.292427 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-webhook-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.292540 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/eb889ad3-88cd-45b4-9b56-13d3181ba3e6-metrics-certs\") pod \"openstack-operator-controller-manager-54bf4fb767-47tg5\" (UID: \"eb889ad3-88cd-45b4-9b56-13d3181ba3e6\") " pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.402482 4982 reflector.go:368] Caches populated for *v1.Secret from 
object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-k7m9j" Dec 05 19:31:45 crc kubenswrapper[4982]: I1205 19:31:45.410987 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:31:46 crc kubenswrapper[4982]: E1205 19:31:46.180590 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199" Dec 05 19:31:46 crc kubenswrapper[4982]: E1205 19:31:46.180779 4982 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199" Dec 05 19:31:46 crc kubenswrapper[4982]: E1205 19:31:46.180948 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.80:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-scnqx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-6b4849bfff-skwf7_openstack-operators(7b1c4531-0231-42d4-94e9-0a211394dfa6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:31:46 crc kubenswrapper[4982]: E1205 19:31:46.934133 4982 
log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 19:31:46 crc kubenswrapper[4982]: E1205 19:31:46.934334 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-d8ph5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-w8qcr_openstack-operators(701bf927-7bac-49a4-9435-a68ebd3ff8c4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:31:47 crc kubenswrapper[4982]: I1205 19:31:47.532131 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:31:48 crc kubenswrapper[4982]: I1205 19:31:48.524101 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerStarted","Data":"a55df9186acc2976354eadd79e047cfcd9603fb8fd957975f8ee711919a2488c"} Dec 05 19:31:48 crc kubenswrapper[4982]: I1205 19:31:48.889269 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5"] Dec 05 
19:31:57 crc kubenswrapper[4982]: I1205 19:31:57.433912 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx"] Dec 05 19:31:57 crc kubenswrapper[4982]: I1205 19:31:57.488953 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl"] Dec 05 19:31:57 crc kubenswrapper[4982]: I1205 19:31:57.606754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" event={"ID":"eb889ad3-88cd-45b4-9b56-13d3181ba3e6","Type":"ContainerStarted","Data":"89a4c345fbae16a2bda5634e00bf52c3e5e95dbd2a7c06bab228d7cdbdf04c6b"} Dec 05 19:31:57 crc kubenswrapper[4982]: W1205 19:31:57.621708 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc286a8e0_15ff_4705_a03f_bca226144360.slice/crio-2242a0caff2c5e8bdcef3c340528b65c0121b352ad382c040c9526a4bbd0591a WatchSource:0}: Error finding container 2242a0caff2c5e8bdcef3c340528b65c0121b352ad382c040c9526a4bbd0591a: Status 404 returned error can't find the container with id 2242a0caff2c5e8bdcef3c340528b65c0121b352ad382c040c9526a4bbd0591a Dec 05 19:31:57 crc kubenswrapper[4982]: W1205 19:31:57.622071 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab5cdcbf_c82f_48be_a97d_65a856e95bd9.slice/crio-2371ed88ba7a33bd95e4e48ae773e8a1ca74522767379ae92f9ac02a6c4c5509 WatchSource:0}: Error finding container 2371ed88ba7a33bd95e4e48ae773e8a1ca74522767379ae92f9ac02a6c4c5509: Status 404 returned error can't find the container with id 2371ed88ba7a33bd95e4e48ae773e8a1ca74522767379ae92f9ac02a6c4c5509 Dec 05 19:31:57 crc kubenswrapper[4982]: E1205 19:31:57.640083 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 05 19:31:57 crc kubenswrapper[4982]: E1205 19:31:57.640256 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dzw7l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-wst8r_openstack-operators(1f84d21b-6ce8-4c97-a104-cb308ce8527d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:31:57 crc kubenswrapper[4982]: E1205 19:31:57.641964 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podUID="1f84d21b-6ce8-4c97-a104-cb308ce8527d" Dec 05 19:31:58 crc kubenswrapper[4982]: I1205 19:31:58.614583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" event={"ID":"c286a8e0-15ff-4705-a03f-bca226144360","Type":"ContainerStarted","Data":"2242a0caff2c5e8bdcef3c340528b65c0121b352ad382c040c9526a4bbd0591a"} Dec 05 19:31:58 crc kubenswrapper[4982]: I1205 19:31:58.616619 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" event={"ID":"ab5cdcbf-c82f-48be-a97d-65a856e95bd9","Type":"ContainerStarted","Data":"2371ed88ba7a33bd95e4e48ae773e8a1ca74522767379ae92f9ac02a6c4c5509"} Dec 05 19:31:59 crc kubenswrapper[4982]: I1205 19:31:59.624895 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" event={"ID":"167e755e-d998-47ca-88dd-0bc17c975864","Type":"ContainerStarted","Data":"4cbe6cff4473d6af3fcebeda89aa4d90bfd3fa6e43c8acf27c18db9e1f689cfd"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.648736 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" event={"ID":"a964acf3-fa80-4561-86da-c831a10fc58e","Type":"ContainerStarted","Data":"edb3190d850f15b45866667ea537357914fa55187037164c0e582a2dfb8de1eb"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.652181 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" event={"ID":"d4adcb8f-8951-4c59-adf3-e94b1a5e202b","Type":"ContainerStarted","Data":"69383c11b4713a5e1cfb9eee71bba86c41d5f1d83002f4a031da0f52aba35bf4"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.662855 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" 
event={"ID":"b4d06e27-b91f-4602-b327-3435d8977280","Type":"ContainerStarted","Data":"fc81aeb1066aeda5c1bcf497b5fb33ccc2331afe54ef101908c478d08fdca88f"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.664398 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" event={"ID":"d30f627a-9e43-4435-aaf3-31a0631bfcba","Type":"ContainerStarted","Data":"95a906714af16da2e8122523ebd3e6a073256209469e100f49c9fadb0e649dbe"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.670334 4982 generic.go:334] "Generic (PLEG): container finished" podID="8dfed7dc-0417-43b9-a148-40164f83126f" containerID="d0c98165d19288930733d64bc39d618b0e1d3b0bfa5cf1c8676efe36cf8253de" exitCode=0 Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.670434 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerDied","Data":"d0c98165d19288930733d64bc39d618b0e1d3b0bfa5cf1c8676efe36cf8253de"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.677987 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" event={"ID":"983b81df-c036-4f75-8d49-259f09235991","Type":"ContainerStarted","Data":"2b00ce2b6525a70c9c338f6ab3c177771fa935efa3c7883b60535199a4f4f4bd"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.682725 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" event={"ID":"55a262ed-8b7c-4e28-af82-89c5df1f675b","Type":"ContainerStarted","Data":"f23fc5c9953394528859f05b90df48876dc738694b5c50af014693fcbd76153f"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.688986 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" event={"ID":"cf51df5f-9adb-4929-9a00-6bfeafdfa069","Type":"ContainerStarted","Data":"a2dfdf6b356e671a69e45e5ab2c30435d42ac5782f927f09ac759dca9ddef16f"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.698353 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" event={"ID":"517bb39b-5710-45f0-b70a-694dc5b4d044","Type":"ContainerStarted","Data":"8bc3668511d936d1bfbea8176841f21f1462ed96284fe5c5a5ab1b865a5e2187"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.709867 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" event={"ID":"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a","Type":"ContainerStarted","Data":"10e9ac3fe275be95746a92775f90aaeae5b84f646ff2c1bec637c7fc788b2ad2"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.713295 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" event={"ID":"eb889ad3-88cd-45b4-9b56-13d3181ba3e6","Type":"ContainerStarted","Data":"5620d80a28c72bb422d2919191dc50368605ba7f81a376bdf58cfbadc02aaa9c"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.713350 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.718272 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" event={"ID":"c959458c-8a9a-4d37-889a-577a673e5305","Type":"ContainerStarted","Data":"eefe19da0c1d412ed2c1eb0d887913ab152e047cf497cd0338f588de4e64ea8b"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.720493 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" event={"ID":"ebffce09-7b77-4c54-9c5a-520517cc3aa8","Type":"ContainerStarted","Data":"3f1cbc9c9f0a6c2d4651f9c4d4e570e10bb6469b5315d16f42de7335433a49f1"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.722246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" event={"ID":"511305c1-7bff-43ce-b398-e5aec02fa9ec","Type":"ContainerStarted","Data":"c1a578e2733ed80aeed5546df7487d031e40d23c15ce5f8f4eeac9972baef51f"} Dec 05 19:32:01 crc kubenswrapper[4982]: I1205 19:32:01.771394 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" podStartSLOduration=32.771379623 podStartE2EDuration="32.771379623s" podCreationTimestamp="2025-12-05 19:31:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:32:01.769521377 +0000 UTC m=+1100.651407372" watchObservedRunningTime="2025-12-05 19:32:01.771379623 +0000 UTC m=+1100.653265628" Dec 05 19:32:03 crc kubenswrapper[4982]: E1205 19:32:03.039640 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" podUID="5cd3d875-b57b-4b61-ac66-17035d351f35" Dec 05 19:32:03 crc kubenswrapper[4982]: E1205 19:32:03.128951 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" podUID="701bf927-7bac-49a4-9435-a68ebd3ff8c4" Dec 05 19:32:03 crc kubenswrapper[4982]: E1205 19:32:03.144710 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" podUID="7b1c4531-0231-42d4-94e9-0a211394dfa6" Dec 05 19:32:03 crc kubenswrapper[4982]: E1205 19:32:03.317000 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" podUID="25671f72-e601-41d8-9617-fb9c436e7959" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.749445 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" event={"ID":"701bf927-7bac-49a4-9435-a68ebd3ff8c4","Type":"ContainerStarted","Data":"eccd0e009dd6557cc41d7b54ca08e0b9289d8d12a3311ab86da475ec59f34460"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.761018 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" event={"ID":"26fa2fe6-70bb-4a70-8bee-b0cde872beb1","Type":"ContainerStarted","Data":"87f980cc4c3a7562e24ec7c96943318c1e55255efb04cd61be69dc306985533f"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.761059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" event={"ID":"26fa2fe6-70bb-4a70-8bee-b0cde872beb1","Type":"ContainerStarted","Data":"766e5b14250f73aca5751d109b79c56a617320dcf728b6c9d38e32dcfe912f86"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.769058 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" event={"ID":"2e0bfce4-bfd5-49ae-858f-647f5f8a919e","Type":"ContainerStarted","Data":"2c2413323d0ff15ad70526337da6ca392daa03f773dbb93acd72bae64b20fb4f"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.769365 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" event={"ID":"2e0bfce4-bfd5-49ae-858f-647f5f8a919e","Type":"ContainerStarted","Data":"e72a366a11c72f29136f6c6e55e516867e60ac1132b3e5f42602ec950f63b94f"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.769648 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.776836 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" event={"ID":"ebffce09-7b77-4c54-9c5a-520517cc3aa8","Type":"ContainerStarted","Data":"cb2576ec5fe90ceb6ea9b1a058afeada392d8a69236c4b8230a0d6bed0f9603a"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.777863 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.784710 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" event={"ID":"25671f72-e601-41d8-9617-fb9c436e7959","Type":"ContainerStarted","Data":"2bc8eb3b18fcc8f9b03cb8fcf50a4caf0b3308519dda8444a83d5c6bacf26a0d"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.791415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" event={"ID":"c286a8e0-15ff-4705-a03f-bca226144360","Type":"ContainerStarted","Data":"f7d8893e679dacdaacee467ea86ab0ca613886626628b40a6902e325f825b4e8"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.791446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" event={"ID":"c286a8e0-15ff-4705-a03f-bca226144360","Type":"ContainerStarted","Data":"174ace74e4d938655a852940021e31e887ca9a2a8c73ee982c1efd2807b63f67"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.791609 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.793998 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" 
event={"ID":"ab5cdcbf-c82f-48be-a97d-65a856e95bd9","Type":"ContainerStarted","Data":"794f934f63b65d43fa357b5da77c2ebc267d420b49dbf48a810bd3460df09e3a"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.806588 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" podStartSLOduration=5.827475945 podStartE2EDuration="35.806575035s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.55744607 +0000 UTC m=+1069.439332065" lastFinishedPulling="2025-12-05 19:32:00.53654516 +0000 UTC m=+1099.418431155" observedRunningTime="2025-12-05 19:32:03.797770154 +0000 UTC m=+1102.679656149" watchObservedRunningTime="2025-12-05 19:32:03.806575035 +0000 UTC m=+1102.688461030" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.808415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" event={"ID":"c959458c-8a9a-4d37-889a-577a673e5305","Type":"ContainerStarted","Data":"003d025600fe86d7df4e22f50dce925b374f147558ff28f474b35ec5158f91cf"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.808735 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.814515 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" event={"ID":"5cd3d875-b57b-4b61-ac66-17035d351f35","Type":"ContainerStarted","Data":"7e5da7d673ef0ae77b60a88039548ff64c09bbfdae83ec519da50a27482b1921"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.820681 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" event={"ID":"7b1c4531-0231-42d4-94e9-0a211394dfa6","Type":"ContainerStarted","Data":"ba7bb5cfae830effbbdb263338f6199a0a6df73dbafe4c5c63818cb0528659ef"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.830245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" event={"ID":"cf51df5f-9adb-4929-9a00-6bfeafdfa069","Type":"ContainerStarted","Data":"19aaa073a556f2c5c1d23c1d58bf6cb7a384b10835d2d2b3a1e47a033095113b"} Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.830681 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.830868 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" podStartSLOduration=2.4808891 podStartE2EDuration="35.830846391s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:29.437857824 +0000 UTC m=+1068.319743819" lastFinishedPulling="2025-12-05 19:32:02.787815115 +0000 UTC m=+1101.669701110" observedRunningTime="2025-12-05 19:32:03.821453567 +0000 UTC m=+1102.703339562" watchObservedRunningTime="2025-12-05 19:32:03.830846391 +0000 UTC m=+1102.712732386" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.880883 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" podStartSLOduration=32.616942038 
podStartE2EDuration="35.88086415s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:57.630793436 +0000 UTC m=+1096.512679431" lastFinishedPulling="2025-12-05 19:32:00.894715558 +0000 UTC m=+1099.776601543" observedRunningTime="2025-12-05 19:32:03.855658091 +0000 UTC m=+1102.737544086" watchObservedRunningTime="2025-12-05 19:32:03.88086415 +0000 UTC m=+1102.762750145" Dec 05 19:32:03 crc kubenswrapper[4982]: I1205 19:32:03.953296 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" podStartSLOduration=2.741939511 podStartE2EDuration="34.953275027s" podCreationTimestamp="2025-12-05 19:31:29 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.627462651 +0000 UTC m=+1069.509348646" lastFinishedPulling="2025-12-05 19:32:02.838798157 +0000 UTC m=+1101.720684162" observedRunningTime="2025-12-05 19:32:03.934343175 +0000 UTC m=+1102.816229170" watchObservedRunningTime="2025-12-05 19:32:03.953275027 +0000 UTC m=+1102.835161012" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.003097 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" podStartSLOduration=3.757082052 podStartE2EDuration="36.003083s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.539463048 +0000 UTC m=+1069.421349043" lastFinishedPulling="2025-12-05 19:32:02.785463996 +0000 UTC m=+1101.667349991" observedRunningTime="2025-12-05 19:32:03.998138897 +0000 UTC m=+1102.880024892" watchObservedRunningTime="2025-12-05 19:32:04.003083 +0000 UTC m=+1102.884968995" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.851688 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" event={"ID":"983b81df-c036-4f75-8d49-259f09235991","Type":"ContainerStarted","Data":"1853b7095249cc16ccef466eeca0e534bc6a9158a40e7f1c5ecc79953b048b2d"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.852611 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.854130 4982 generic.go:334] "Generic (PLEG): container finished" podID="8dfed7dc-0417-43b9-a148-40164f83126f" containerID="d49ea0870fa204afdbd1d9e09f2dc4fd49b495e7a6371a4ce5b9fe156d8bba97" exitCode=0 Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.854183 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerDied","Data":"d49ea0870fa204afdbd1d9e09f2dc4fd49b495e7a6371a4ce5b9fe156d8bba97"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.855839 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" event={"ID":"517bb39b-5710-45f0-b70a-694dc5b4d044","Type":"ContainerStarted","Data":"5e2cd7f6d54d0e6f883ea9298b70c62b64ed3c844a168a68d2c6ff189c4fabdc"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.856196 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.858229 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" event={"ID":"701bf927-7bac-49a4-9435-a68ebd3ff8c4","Type":"ContainerStarted","Data":"8ecdbb331652a7996558f7b0bcb4a2b0506095de92d9d861524dd988a8f18dd4"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.858639 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.860093 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" event={"ID":"b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a","Type":"ContainerStarted","Data":"9605a737053405a52c6a215e0ca12f326df6b2f90653fbf35ffd3c49cef69c91"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.860470 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.862579 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" event={"ID":"25671f72-e601-41d8-9617-fb9c436e7959","Type":"ContainerStarted","Data":"67a562b6e4b931ebc01c9e4224f157c22330d3ae927dd14b66e11a51b704d3eb"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.862937 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.864123 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" event={"ID":"5cd3d875-b57b-4b61-ac66-17035d351f35","Type":"ContainerStarted","Data":"2907ebb2faa0d08124c98f3fe3af7ab7ab7fd7ebe45bceb269d84aed12a059cc"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.864482 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.865907 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" event={"ID":"7b1c4531-0231-42d4-94e9-0a211394dfa6","Type":"ContainerStarted","Data":"138d818a85b2218b65a202f4ebdac62c3ec007e7a726fa53b32413b87dac9df4"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.866315 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.867759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" event={"ID":"d4adcb8f-8951-4c59-adf3-e94b1a5e202b","Type":"ContainerStarted","Data":"5b01051cfc964f182841a1d98c10b2ab646b8f8cd335df34a0dbfebe39d20a58"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.868086 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.869412 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" 
event={"ID":"511305c1-7bff-43ce-b398-e5aec02fa9ec","Type":"ContainerStarted","Data":"0ada51ab977a874f0ad44cc7e6194e5f9343aab7921dec3c6108b6cd00071abf"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.869753 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.870948 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" event={"ID":"55a262ed-8b7c-4e28-af82-89c5df1f675b","Type":"ContainerStarted","Data":"ff24711e0bf3dbcc30f77099e78010b858a0ccdece5ec3eb9196c4ecc804a34e"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.871390 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.872554 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" event={"ID":"a964acf3-fa80-4561-86da-c831a10fc58e","Type":"ContainerStarted","Data":"d86862041a6b43a26a93b8bf66caaccbf97bf33ad55d4c9133e8f540d3ce85ce"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.872897 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.874196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" event={"ID":"b4d06e27-b91f-4602-b327-3435d8977280","Type":"ContainerStarted","Data":"7ad9193585267ee0d15138ec734bb165354827297e9897bbfe5ed34d2022a903"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.874531 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.876043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" event={"ID":"d30f627a-9e43-4435-aaf3-31a0631bfcba","Type":"ContainerStarted","Data":"e82498c28e7b56c35e0ff3990338bcdc5c5bd3addbcb2a3266c005a83af70f64"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.876451 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.878392 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" event={"ID":"ab5cdcbf-c82f-48be-a97d-65a856e95bd9","Type":"ContainerStarted","Data":"f55e7340dfc976ff927b12d6146baa9990b03d9a8f55d62a455ee638fbf65399"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.878775 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.880756 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" event={"ID":"167e755e-d998-47ca-88dd-0bc17c975864","Type":"ContainerStarted","Data":"ee29c49a64062156cd0fdfcd3e5d948734db6f7cac2b7163682b618336ec157a"} Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 
19:32:04.880825 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.880844 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.882903 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.899502 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" podStartSLOduration=4.43824354 podStartE2EDuration="36.899486981s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.533335234 +0000 UTC m=+1069.415221229" lastFinishedPulling="2025-12-05 19:32:02.994578675 +0000 UTC m=+1101.876464670" observedRunningTime="2025-12-05 19:32:04.896294141 +0000 UTC m=+1103.778180136" watchObservedRunningTime="2025-12-05 19:32:04.899486981 +0000 UTC m=+1103.781372976" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.927301 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8ttpr" podStartSLOduration=4.530052724 podStartE2EDuration="36.927287545s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.534696328 +0000 UTC m=+1069.416582323" lastFinishedPulling="2025-12-05 19:32:02.931931139 +0000 UTC m=+1101.813817144" observedRunningTime="2025-12-05 19:32:04.924339121 +0000 UTC m=+1103.806225116" watchObservedRunningTime="2025-12-05 19:32:04.927287545 +0000 UTC m=+1103.809173540" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.944444 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" podStartSLOduration=2.922794116 podStartE2EDuration="36.944423513s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.229409001 +0000 UTC m=+1069.111294996" lastFinishedPulling="2025-12-05 19:32:04.251038388 +0000 UTC m=+1103.132924393" observedRunningTime="2025-12-05 19:32:04.942912005 +0000 UTC m=+1103.824798010" watchObservedRunningTime="2025-12-05 19:32:04.944423513 +0000 UTC m=+1103.826309508" Dec 05 19:32:04 crc kubenswrapper[4982]: I1205 19:32:04.985883 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" podStartSLOduration=4.398867393 podStartE2EDuration="36.985861727s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.529887467 +0000 UTC m=+1069.411773462" lastFinishedPulling="2025-12-05 19:32:03.116881801 +0000 UTC m=+1101.998767796" observedRunningTime="2025-12-05 19:32:04.982467232 +0000 UTC m=+1103.864353247" watchObservedRunningTime="2025-12-05 19:32:04.985861727 +0000 UTC m=+1103.867747732" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.008112 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" podStartSLOduration=4.586105768 podStartE2EDuration="37.008094902s" podCreationTimestamp="2025-12-05 19:31:28 
+0000 UTC" firstStartedPulling="2025-12-05 19:31:30.545317535 +0000 UTC m=+1069.427203530" lastFinishedPulling="2025-12-05 19:32:02.967306679 +0000 UTC m=+1101.849192664" observedRunningTime="2025-12-05 19:32:05.00161346 +0000 UTC m=+1103.883499465" watchObservedRunningTime="2025-12-05 19:32:05.008094902 +0000 UTC m=+1103.889980897" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.049066 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" podStartSLOduration=4.259339023 podStartE2EDuration="37.049046614s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.142805053 +0000 UTC m=+1069.024691048" lastFinishedPulling="2025-12-05 19:32:02.932512644 +0000 UTC m=+1101.814398639" observedRunningTime="2025-12-05 19:32:05.043445554 +0000 UTC m=+1103.925331549" watchObservedRunningTime="2025-12-05 19:32:05.049046614 +0000 UTC m=+1103.930932609" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.074332 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" podStartSLOduration=4.216717186 podStartE2EDuration="37.074309994s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.119073116 +0000 UTC m=+1069.000959111" lastFinishedPulling="2025-12-05 19:32:02.976665934 +0000 UTC m=+1101.858551919" observedRunningTime="2025-12-05 19:32:05.069916585 +0000 UTC m=+1103.951802580" watchObservedRunningTime="2025-12-05 19:32:05.074309994 +0000 UTC m=+1103.956196009" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.096411 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" podStartSLOduration=3.687786998 podStartE2EDuration="37.096394705s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.501063962 +0000 UTC m=+1069.382949957" lastFinishedPulling="2025-12-05 19:32:03.909671669 +0000 UTC m=+1102.791557664" observedRunningTime="2025-12-05 19:32:05.086346965 +0000 UTC m=+1103.968232960" watchObservedRunningTime="2025-12-05 19:32:05.096394705 +0000 UTC m=+1103.978280701" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.116651 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" podStartSLOduration=33.810268612 podStartE2EDuration="37.116632361s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:57.625615156 +0000 UTC m=+1096.507501151" lastFinishedPulling="2025-12-05 19:32:00.931978905 +0000 UTC m=+1099.813864900" observedRunningTime="2025-12-05 19:32:05.114775474 +0000 UTC m=+1103.996661469" watchObservedRunningTime="2025-12-05 19:32:05.116632361 +0000 UTC m=+1103.998518356" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.135894 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" podStartSLOduration=4.798511405 podStartE2EDuration="37.135877051s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.506515499 +0000 UTC m=+1069.388401494" lastFinishedPulling="2025-12-05 19:32:02.843881145 +0000 UTC m=+1101.725767140" observedRunningTime="2025-12-05 19:32:05.130784974 +0000 UTC m=+1104.012670969" 
watchObservedRunningTime="2025-12-05 19:32:05.135877051 +0000 UTC m=+1104.017763036" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.153761 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" podStartSLOduration=3.962549849 podStartE2EDuration="37.153748557s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:29.741194433 +0000 UTC m=+1068.623080428" lastFinishedPulling="2025-12-05 19:32:02.932393141 +0000 UTC m=+1101.814279136" observedRunningTime="2025-12-05 19:32:05.151205903 +0000 UTC m=+1104.033091898" watchObservedRunningTime="2025-12-05 19:32:05.153748557 +0000 UTC m=+1104.035634542" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.178839 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" podStartSLOduration=3.055229182 podStartE2EDuration="37.178824053s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.131507079 +0000 UTC m=+1069.013393074" lastFinishedPulling="2025-12-05 19:32:04.25510195 +0000 UTC m=+1103.136987945" observedRunningTime="2025-12-05 19:32:05.17672364 +0000 UTC m=+1104.058609645" watchObservedRunningTime="2025-12-05 19:32:05.178824053 +0000 UTC m=+1104.060710048" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.198337 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" podStartSLOduration=3.978265355 podStartE2EDuration="37.198315139s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:29.712139472 +0000 UTC m=+1068.594025467" lastFinishedPulling="2025-12-05 19:32:02.932189256 +0000 UTC m=+1101.814075251" observedRunningTime="2025-12-05 19:32:05.194824882 +0000 UTC m=+1104.076710877" watchObservedRunningTime="2025-12-05 19:32:05.198315139 +0000 UTC m=+1104.080201134" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.217226 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" podStartSLOduration=7.049432136 podStartE2EDuration="37.217209861s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.542876174 +0000 UTC m=+1069.424762169" lastFinishedPulling="2025-12-05 19:32:00.710653899 +0000 UTC m=+1099.592539894" observedRunningTime="2025-12-05 19:32:05.215091418 +0000 UTC m=+1104.096977423" watchObservedRunningTime="2025-12-05 19:32:05.217209861 +0000 UTC m=+1104.099095856" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.242803 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" podStartSLOduration=4.393714195 podStartE2EDuration="37.242783699s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.136038063 +0000 UTC m=+1069.017924058" lastFinishedPulling="2025-12-05 19:32:02.985107567 +0000 UTC m=+1101.866993562" observedRunningTime="2025-12-05 19:32:05.241535178 +0000 UTC m=+1104.123421183" watchObservedRunningTime="2025-12-05 19:32:05.242783699 +0000 UTC m=+1104.124669694" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.274543 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" podStartSLOduration=3.092257916 podStartE2EDuration="37.274526781s" podCreationTimestamp="2025-12-05 19:31:28 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.132311509 +0000 UTC m=+1069.014197504" lastFinishedPulling="2025-12-05 19:32:04.314580374 +0000 UTC m=+1103.196466369" observedRunningTime="2025-12-05 19:32:05.270712716 +0000 UTC m=+1104.152598731" watchObservedRunningTime="2025-12-05 19:32:05.274526781 +0000 UTC m=+1104.156412776" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.889040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerStarted","Data":"591fd55efd91f8aee500f539b0736ff20b56729f128ab68cc14a68d2b808776f"} Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.892632 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-7rqsz" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.893327 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-p6gnj" Dec 05 19:32:05 crc kubenswrapper[4982]: I1205 19:32:05.921730 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ppf2q" podStartSLOduration=29.263813712 podStartE2EDuration="31.921711163s" podCreationTimestamp="2025-12-05 19:31:34 +0000 UTC" firstStartedPulling="2025-12-05 19:32:02.562969941 +0000 UTC m=+1101.444855936" lastFinishedPulling="2025-12-05 19:32:05.220867392 +0000 UTC m=+1104.102753387" observedRunningTime="2025-12-05 19:32:05.905314384 +0000 UTC m=+1104.787200389" watchObservedRunningTime="2025-12-05 19:32:05.921711163 +0000 UTC m=+1104.803597158" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.900107 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-rvzdw" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.900699 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-ctg6p" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.900791 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-d6svn" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.900903 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-472kw" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.901061 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-xww76" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.901539 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-t2zqn" Dec 05 19:32:06 crc kubenswrapper[4982]: I1205 19:32:06.902460 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-wrfxk" Dec 05 19:32:08 crc kubenswrapper[4982]: I1205 19:32:08.758091 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-f8852" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.064571 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-ms8zf" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.153170 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-w8qcr" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.212339 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-7hbtv" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.400764 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-6b4849bfff-skwf7" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.511803 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-d48dh" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.579829 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-kzpx6" Dec 05 19:32:09 crc kubenswrapper[4982]: I1205 19:32:09.702761 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-829rq" Dec 05 19:32:12 crc kubenswrapper[4982]: E1205 19:32:12.394352 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podUID="1f84d21b-6ce8-4c97-a104-cb308ce8527d" Dec 05 19:32:14 crc kubenswrapper[4982]: I1205 19:32:14.531846 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-fnkqx" Dec 05 19:32:14 crc kubenswrapper[4982]: I1205 19:32:14.751843 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:14 crc kubenswrapper[4982]: I1205 19:32:14.752124 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:14 crc kubenswrapper[4982]: I1205 19:32:14.824472 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:15 crc kubenswrapper[4982]: I1205 19:32:15.030171 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:15 crc kubenswrapper[4982]: I1205 19:32:15.085258 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:32:15 crc kubenswrapper[4982]: I1205 19:32:15.159724 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl" Dec 05 19:32:15 crc kubenswrapper[4982]: I1205 19:32:15.416568 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-54bf4fb767-47tg5" Dec 05 19:32:16 crc kubenswrapper[4982]: I1205 19:32:16.983064 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ppf2q" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="registry-server" containerID="cri-o://591fd55efd91f8aee500f539b0736ff20b56729f128ab68cc14a68d2b808776f" gracePeriod=2 Dec 05 19:32:17 crc kubenswrapper[4982]: I1205 19:32:17.992438 4982 generic.go:334] "Generic (PLEG): container finished" podID="8dfed7dc-0417-43b9-a148-40164f83126f" containerID="591fd55efd91f8aee500f539b0736ff20b56729f128ab68cc14a68d2b808776f" exitCode=0 Dec 05 19:32:17 crc kubenswrapper[4982]: I1205 19:32:17.992761 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerDied","Data":"591fd55efd91f8aee500f539b0736ff20b56729f128ab68cc14a68d2b808776f"} Dec 05 19:32:18 crc kubenswrapper[4982]: I1205 19:32:18.893780 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-rrjv5" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.685513 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.811415 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities\") pod \"8dfed7dc-0417-43b9-a148-40164f83126f\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.811526 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bg7tl\" (UniqueName: \"kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl\") pod \"8dfed7dc-0417-43b9-a148-40164f83126f\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.811552 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content\") pod \"8dfed7dc-0417-43b9-a148-40164f83126f\" (UID: \"8dfed7dc-0417-43b9-a148-40164f83126f\") " Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.812459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities" (OuterVolumeSpecName: "utilities") pod "8dfed7dc-0417-43b9-a148-40164f83126f" (UID: "8dfed7dc-0417-43b9-a148-40164f83126f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.824475 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl" (OuterVolumeSpecName: "kube-api-access-bg7tl") pod "8dfed7dc-0417-43b9-a148-40164f83126f" (UID: "8dfed7dc-0417-43b9-a148-40164f83126f"). InnerVolumeSpecName "kube-api-access-bg7tl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.827435 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8dfed7dc-0417-43b9-a148-40164f83126f" (UID: "8dfed7dc-0417-43b9-a148-40164f83126f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.913605 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bg7tl\" (UniqueName: \"kubernetes.io/projected/8dfed7dc-0417-43b9-a148-40164f83126f-kube-api-access-bg7tl\") on node \"crc\" DevicePath \"\"" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.913862 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:32:19 crc kubenswrapper[4982]: I1205 19:32:19.913875 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8dfed7dc-0417-43b9-a148-40164f83126f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.017832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ppf2q" event={"ID":"8dfed7dc-0417-43b9-a148-40164f83126f","Type":"ContainerDied","Data":"a55df9186acc2976354eadd79e047cfcd9603fb8fd957975f8ee711919a2488c"} Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.017881 4982 scope.go:117] "RemoveContainer" containerID="591fd55efd91f8aee500f539b0736ff20b56729f128ab68cc14a68d2b808776f" Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.017919 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ppf2q" Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.038894 4982 scope.go:117] "RemoveContainer" containerID="d49ea0870fa204afdbd1d9e09f2dc4fd49b495e7a6371a4ce5b9fe156d8bba97" Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.054782 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.060190 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ppf2q"] Dec 05 19:32:20 crc kubenswrapper[4982]: I1205 19:32:20.069552 4982 scope.go:117] "RemoveContainer" containerID="d0c98165d19288930733d64bc39d618b0e1d3b0bfa5cf1c8676efe36cf8253de" Dec 05 19:32:21 crc kubenswrapper[4982]: I1205 19:32:21.407221 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" path="/var/lib/kubelet/pods/8dfed7dc-0417-43b9-a148-40164f83126f/volumes" Dec 05 19:32:25 crc kubenswrapper[4982]: I1205 19:32:25.063847 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" event={"ID":"1f84d21b-6ce8-4c97-a104-cb308ce8527d","Type":"ContainerStarted","Data":"1dbc4a037955629e440e9bbbe57614f2a0223fa7ccd977f1ef3026e0d631e4d6"} Dec 05 19:32:25 crc kubenswrapper[4982]: I1205 19:32:25.085685 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-wst8r" podStartSLOduration=2.686468781 podStartE2EDuration="56.085660832s" podCreationTimestamp="2025-12-05 19:31:29 +0000 UTC" firstStartedPulling="2025-12-05 19:31:30.628771684 +0000 UTC m=+1069.510657679" lastFinishedPulling="2025-12-05 19:32:24.027963725 +0000 UTC m=+1122.909849730" observedRunningTime="2025-12-05 19:32:25.079826847 +0000 UTC m=+1123.961712882" watchObservedRunningTime="2025-12-05 19:32:25.085660832 +0000 UTC m=+1123.967546837" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.228682 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:32:42 crc kubenswrapper[4982]: E1205 19:32:42.229697 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="registry-server" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.229718 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="registry-server" Dec 05 19:32:42 crc kubenswrapper[4982]: E1205 19:32:42.229744 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="extract-content" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.229753 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="extract-content" Dec 05 19:32:42 crc kubenswrapper[4982]: E1205 19:32:42.229786 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="extract-utilities" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.229795 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="extract-utilities" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.229972 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="8dfed7dc-0417-43b9-a148-40164f83126f" containerName="registry-server" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.230989 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.233546 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cdnpx" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.233875 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.234065 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.234408 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.246267 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.317038 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.318290 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.320174 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.346269 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.363534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gwsv5\" (UniqueName: \"kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.363978 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.465645 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.465722 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.465799 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.465911 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sxn7\" (UniqueName: \"kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.465981 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gwsv5\" (UniqueName: \"kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.466633 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.488226 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gwsv5\" (UniqueName: \"kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5\") pod \"dnsmasq-dns-675f4bcbfc-4wssq\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.549511 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.566892 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.567017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.567047 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sxn7\" (UniqueName: \"kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.568861 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.569115 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.614414 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sxn7\" (UniqueName: \"kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7\") pod \"dnsmasq-dns-78dd6ddcc-hd4vz\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.636239 4982 util.go:30] "No sandbox for pod can be found. 
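For each volume of the new dnsmasq pods, the reconciler lines above walk the same happy path: VerifyControllerAttachedVolume, then MountVolume started, then MountVolume.SetUp succeeded, after which the pod's sandbox is created (the "Need to start a new one" lines). A schematic of that per-volume loop, with stub functions as simplified stand-ins for the kubelet's internal operation executor:

```go
package main

import "fmt"

// Stubs standing in for the real attach check and mount setup; phase
// names mirror the log lines above.
func verifyAttached(volume string) error { return nil }
func setUp(volume string) error          { return nil }

func main() {
	pod := "openstack/dnsmasq-dns-78dd6ddcc-hd4vz"
	for _, v := range []string{"config", "dns-svc", "kube-api-access-6sxn7"} {
		if err := verifyAttached(v); err != nil {
			fmt.Printf("VerifyControllerAttachedVolume failed for %q: %v\n", v, err)
			continue
		}
		fmt.Printf("MountVolume started for volume %q pod=%q\n", v, pod)
		if err := setUp(v); err != nil {
			fmt.Printf("MountVolume.SetUp failed for %q: %v\n", v, err)
			continue
		}
		fmt.Printf("MountVolume.SetUp succeeded for volume %q pod=%q\n", v, pod)
	}
}
```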
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:32:42 crc kubenswrapper[4982]: W1205 19:32:42.989112 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc04c3f9_68d7_4513_9a8f_ce3c1c9bee48.slice/crio-94ed5879cc1fb63a03304672f0e89cda74a1b87eca438628d4cbbdd31f5735fb WatchSource:0}: Error finding container 94ed5879cc1fb63a03304672f0e89cda74a1b87eca438628d4cbbdd31f5735fb: Status 404 returned error can't find the container with id 94ed5879cc1fb63a03304672f0e89cda74a1b87eca438628d4cbbdd31f5735fb Dec 05 19:32:42 crc kubenswrapper[4982]: I1205 19:32:42.991747 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:32:43 crc kubenswrapper[4982]: I1205 19:32:43.102778 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:32:43 crc kubenswrapper[4982]: I1205 19:32:43.237387 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" event={"ID":"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6","Type":"ContainerStarted","Data":"577c828cb672f9b7ce5ca1137083648228c88432c449d4dccd262b43308f8128"} Dec 05 19:32:43 crc kubenswrapper[4982]: I1205 19:32:43.238650 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" event={"ID":"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48","Type":"ContainerStarted","Data":"94ed5879cc1fb63a03304672f0e89cda74a1b87eca438628d4cbbdd31f5735fb"} Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.728623 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.763941 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.765629 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.776930 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.921519 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlffv\" (UniqueName: \"kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.921607 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:45 crc kubenswrapper[4982]: I1205 19:32:45.921662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.019476 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.022385 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.022441 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.022510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlffv\" (UniqueName: \"kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.023511 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.023533 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.042541 
4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.043755 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.051698 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlffv\" (UniqueName: \"kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv\") pod \"dnsmasq-dns-666b6646f7-ft2hj\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.063552 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.090825 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.225322 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.225372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.225405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82jr4\" (UniqueName: \"kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.327092 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.327132 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.327174 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82jr4\" (UniqueName: \"kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.328067 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.328092 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.347954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82jr4\" (UniqueName: \"kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4\") pod \"dnsmasq-dns-57d769cc4f-vfqzw\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.395769 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.892728 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.894278 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.896828 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897005 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897142 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897328 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897457 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5c7gt" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897597 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.897788 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 19:32:46 crc kubenswrapper[4982]: I1205 19:32:46.907017 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035721 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035781 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: 
\"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035856 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035941 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.035982 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.036003 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.036042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffm7r\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.036072 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " 
pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137116 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137181 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137224 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137271 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137331 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137373 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffm7r\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137407 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137453 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: 
\"kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137479 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.137982 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.138254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.138473 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.138586 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.141685 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.164843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0" Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.165306 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.168994 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.169304 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6ead355b67baeed78bccda2d66d28a6193337f668634f8727812ed92695a9b2/globalmount\"" pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.169630 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.176212 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.215707 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffm7r\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.307162 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.308734 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.326628 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.326872 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jg4zm"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.326999 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.327102 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.327274 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.327493 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.327729 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.330854 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.342520 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.448980 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449035 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449078 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449119 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56k8j\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449656 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449720 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449777 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449808 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449850 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.449889 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.515600 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56k8j\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551566 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551592 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551616 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551643 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551663 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.551683 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.552475 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.552763 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.552996 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.553334 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.554243 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.555126 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
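[Editor's note] The reflector.go:368 "Caches populated" entries above show the kubelet starting a dedicated list/watch cache for each Secret and ConfigMap that rabbitmq-cell1-server-0 references, scoped to the single object in the "openstack" namespace. The same list/watch-and-cache mechanism is exposed by client-go informers; the standalone sketch below builds a namespace-scoped ConfigMap cache and blocks until its initial LIST completes, which is the moment the kubelet's log calls "Caches populated". The kubeconfig path is an assumption, and this is the general client-go pattern, not the kubelet's internal wiring.

    // Sketch: a namespace-scoped informer cache, analogous to the kubelet's
    // per-object reflectors for pod-referenced ConfigMaps and Secrets.
    package main

    import (
        "fmt"
        "time"

        "k8s.io/client-go/informers"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/cache"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // assumption
        if err != nil {
            panic(err)
        }
        client := kubernetes.NewForConfigOrDie(cfg)

        factory := informers.NewSharedInformerFactoryWithOptions(
            client, 10*time.Minute, informers.WithNamespace("openstack"))
        cmInformer := factory.Core().V1().ConfigMaps().Informer()

        stop := make(chan struct{})
        defer close(stop)
        factory.Start(stop)

        // Equivalent of "Caches populated": wait for the initial LIST to land.
        if !cache.WaitForCacheSync(stop, cmInformer.HasSynced) {
            panic("cache never synced")
        }
        for _, key := range cmInformer.GetStore().ListKeys() {
            fmt.Println("cached:", key)
        }
    }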
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.555170 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ff9c192ce0064b767547340d30e9eff06237e32ce1f9d3aedb8d855c64d41efa/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.558573 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.559617 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.559678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.561304 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.571249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56k8j\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.600695 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:47 crc kubenswrapper[4982]: I1205 19:32:47.632232 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.501405 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"]
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.505251 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.509709 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.509823 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.510021 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.512064 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-q7cjc"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.516875 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.517865 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"]
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-config-data-default\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669349 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f68xv\" (UniqueName: \"kubernetes.io/projected/b6542514-631f-4a81-aba8-11cfebd33048-kube-api-access-f68xv\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-kolla-config\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669429 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b6542514-631f-4a81-aba8-11cfebd33048-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669475 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669495 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.669515 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5a85a20d-911f-442d-882a-9466c6b787d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a85a20d-911f-442d-882a-9466c6b787d1\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b6542514-631f-4a81-aba8-11cfebd33048-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770630 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5a85a20d-911f-442d-882a-9466c6b787d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a85a20d-911f-442d-882a-9466c6b787d1\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770781 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-config-data-default\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770811 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f68xv\" (UniqueName: \"kubernetes.io/projected/b6542514-631f-4a81-aba8-11cfebd33048-kube-api-access-f68xv\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770825 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-kolla-config\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.770855 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.771114 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b6542514-631f-4a81-aba8-11cfebd33048-config-data-generated\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.772025 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-kolla-config\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.772472 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-operator-scripts\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.773206 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b6542514-631f-4a81-aba8-11cfebd33048-config-data-default\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.774281 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.774519 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5a85a20d-911f-442d-882a-9466c6b787d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a85a20d-911f-442d-882a-9466c6b787d1\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1e2683bd29df1875c57dc98484306bc1840562302efe8efae99a994eb953d3b4/globalmount\"" pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.776487 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.790700 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b6542514-631f-4a81-aba8-11cfebd33048-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.796818 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f68xv\" (UniqueName: \"kubernetes.io/projected/b6542514-631f-4a81-aba8-11cfebd33048-kube-api-access-f68xv\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.825716 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5a85a20d-911f-442d-882a-9466c6b787d1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5a85a20d-911f-442d-882a-9466c6b787d1\") pod \"openstack-galera-0\" (UID: \"b6542514-631f-4a81-aba8-11cfebd33048\") " pod="openstack/openstack-galera-0"
Dec 05 19:32:48 crc kubenswrapper[4982]: I1205 19:32:48.833830 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.967529 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.968838 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.970637 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-mk9xs"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.971658 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.971715 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.971840 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc"
Dec 05 19:32:49 crc kubenswrapper[4982]: I1205 19:32:49.985181 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096128 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096191 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znbs4\" (UniqueName: \"kubernetes.io/projected/af72a355-0521-4724-8224-c7fd9046b4d6-kube-api-access-znbs4\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096221 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096269 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096311 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096347 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.096397 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197258 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197321 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197349 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197402 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197420 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znbs4\" (UniqueName: \"kubernetes.io/projected/af72a355-0521-4724-8224-c7fd9046b4d6-kube-api-access-znbs4\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197447 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.197489 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.199232 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.199344 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.199379 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/64a0078d6239bd305e8438e24e03d7a11e74d53d320d07e4cb1c7eee32c74b1c/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.233724 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.237224 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.239884 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-wzgm6"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.240565 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.240733 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.259375 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.264433 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-d7b3e909-3afc-4740-a3df-2b9a36f0fcba\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.270803 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.271629 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.271764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/af72a355-0521-4724-8224-c7fd9046b4d6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.271996 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.272169 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/af72a355-0521-4724-8224-c7fd9046b4d6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.298112 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znbs4\" (UniqueName: \"kubernetes.io/projected/af72a355-0521-4724-8224-c7fd9046b4d6-kube-api-access-znbs4\") pod \"openstack-cell1-galera-0\" (UID: \"af72a355-0521-4724-8224-c7fd9046b4d6\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.401177 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.401268 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-config-data\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.401308 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.401336 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-kolla-config\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.401385 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scrzc\" (UniqueName: \"kubernetes.io/projected/34fad3bb-6720-4219-8862-08492842062a-kube-api-access-scrzc\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.502640 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.502726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-config-data\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.502768 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.502793 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-kolla-config\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.502850 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scrzc\" (UniqueName: \"kubernetes.io/projected/34fad3bb-6720-4219-8862-08492842062a-kube-api-access-scrzc\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.503709 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-config-data\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.504235 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/34fad3bb-6720-4219-8862-08492842062a-kolla-config\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.519315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-memcached-tls-certs\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.519643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34fad3bb-6720-4219-8862-08492842062a-combined-ca-bundle\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.524225 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scrzc\" (UniqueName: \"kubernetes.io/projected/34fad3bb-6720-4219-8862-08492842062a-kube-api-access-scrzc\") pod \"memcached-0\" (UID: \"34fad3bb-6720-4219-8862-08492842062a\") " pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.573418 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 05 19:32:50 crc kubenswrapper[4982]: I1205 19:32:50.585388 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:32:51 crc kubenswrapper[4982]: I1205 19:32:51.957245 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:32:51 crc kubenswrapper[4982]: I1205 19:32:51.960639 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 19:32:51 crc kubenswrapper[4982]: I1205 19:32:51.966523 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:32:51 crc kubenswrapper[4982]: I1205 19:32:51.966832 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-mf8ck"
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.125665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2h9b\" (UniqueName: \"kubernetes.io/projected/700dc7aa-a441-4419-90c8-ff6ec6d31f23-kube-api-access-h2h9b\") pod \"kube-state-metrics-0\" (UID: \"700dc7aa-a441-4419-90c8-ff6ec6d31f23\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.226951 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2h9b\" (UniqueName: \"kubernetes.io/projected/700dc7aa-a441-4419-90c8-ff6ec6d31f23-kube-api-access-h2h9b\") pod \"kube-state-metrics-0\" (UID: \"700dc7aa-a441-4419-90c8-ff6ec6d31f23\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.263880 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2h9b\" (UniqueName: \"kubernetes.io/projected/700dc7aa-a441-4419-90c8-ff6ec6d31f23-kube-api-access-h2h9b\") pod \"kube-state-metrics-0\" (UID: \"700dc7aa-a441-4419-90c8-ff6ec6d31f23\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.290750 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.594602 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.596657 4982 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.598781 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-6mfk2" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.599007 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.599176 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.599370 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.610680 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.618843 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.734593 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.734646 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.734676 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.734797 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.734959 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.735002 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-web-config\") pod \"alertmanager-metric-storage-0\" (UID: 
\"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.736391 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvlhl\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-kube-api-access-xvlhl\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837303 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvlhl\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-kube-api-access-xvlhl\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837369 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837406 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837462 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.837484 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.838203 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-alertmanager-metric-storage-db\") pod 
\"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.844621 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/4e3679d1-2b65-494e-bc5f-2a68697da816-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.845071 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.845693 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.849850 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/4e3679d1-2b65-494e-bc5f-2a68697da816-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.861691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.872929 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvlhl\" (UniqueName: \"kubernetes.io/projected/4e3679d1-2b65-494e-bc5f-2a68697da816-kube-api-access-xvlhl\") pod \"alertmanager-metric-storage-0\" (UID: \"4e3679d1-2b65-494e-bc5f-2a68697da816\") " pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:52 crc kubenswrapper[4982]: I1205 19:32:52.914571 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.266962 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.270070 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.278359 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.278411 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.278370 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.278370 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.278748 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.279121 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rvw5r" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.290947 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456411 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456456 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456475 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456536 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456584 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfp64\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456602 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.456624 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558321 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558443 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558480 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfp64\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558552 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: 
\"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.558625 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.559830 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.561625 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.561671 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6dc18053a3b2267841393e5d13e7583bb9f8943657ae1a7f2ce4d8e6481b9d52/globalmount\"" pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.562566 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.562953 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.563226 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.563876 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " 
pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.567728 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.579655 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfp64\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.600958 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") " pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:53 crc kubenswrapper[4982]: I1205 19:32:53.897571 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.945478 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.948350 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.952790 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.952873 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.952967 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.953338 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.966509 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-5zsgg" Dec 05 19:32:55 crc kubenswrapper[4982]: I1205 19:32:55.976228 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.097921 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098049 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098227 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098406 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-config\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098477 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2dft\" (UniqueName: \"kubernetes.io/projected/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-kube-api-access-b2dft\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098633 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.098781 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200014 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200217 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200282 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200322 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-config\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200350 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2dft\" (UniqueName: \"kubernetes.io/projected/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-kube-api-access-b2dft\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200383 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.200705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.201334 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-config\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.201841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.203293 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.203925 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5ee3a1419d5063774b340deb4d0caf3b3adc0ea3495e8e74a5c48ae11af41261/globalmount\"" pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.205860 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.206136 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.207543 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.220476 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2dft\" (UniqueName: \"kubernetes.io/projected/7d0f7663-0afb-41dc-bae8-7efdafbf2ed2-kube-api-access-b2dft\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.248708 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-51413814-8741-406b-b9d1-1ff2aba5ba5c\") pod \"ovsdbserver-nb-0\" (UID: \"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2\") " pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.289006 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.673396 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jhsjf"] Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.674481 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.681824 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.682117 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6866r" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.682310 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.695086 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf"] Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.722000 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-sct9d"] Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.724035 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.733322 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-sct9d"] Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.812840 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-ovn-controller-tls-certs\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.812890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-run\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-combined-ca-bundle\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813243 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6jfn\" (UniqueName: \"kubernetes.io/projected/c4d66436-88ae-4023-9601-bd2aa6954667-kube-api-access-s6jfn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813285 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-log-ovn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813321 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run-ovn\") pod \"ovn-controller-jhsjf\" (UID: 
\"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813347 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d452a876-70e0-416c-ab4d-667b53e8f86e-scripts\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4d66436-88ae-4023-9601-bd2aa6954667-scripts\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813471 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-etc-ovs\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813531 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813582 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-lib\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813666 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-log\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.813756 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv5qq\" (UniqueName: \"kubernetes.io/projected/d452a876-70e0-416c-ab4d-667b53e8f86e-kube-api-access-kv5qq\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.915784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-combined-ca-bundle\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.915917 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6jfn\" (UniqueName: \"kubernetes.io/projected/c4d66436-88ae-4023-9601-bd2aa6954667-kube-api-access-s6jfn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " 
pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916008 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-log-ovn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916038 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run-ovn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916089 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d452a876-70e0-416c-ab4d-667b53e8f86e-scripts\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916263 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4d66436-88ae-4023-9601-bd2aa6954667-scripts\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916302 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-etc-ovs\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916332 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-lib\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916407 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-log\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916526 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv5qq\" (UniqueName: \"kubernetes.io/projected/d452a876-70e0-416c-ab4d-667b53e8f86e-kube-api-access-kv5qq\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run-ovn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916653 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-ovn-controller-tls-certs\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916691 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-run\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916690 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-etc-ovs\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-log-ovn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916883 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-log\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916968 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c4d66436-88ae-4023-9601-bd2aa6954667-var-run\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.916971 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-run\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.917133 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/d452a876-70e0-416c-ab4d-667b53e8f86e-var-lib\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.918432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c4d66436-88ae-4023-9601-bd2aa6954667-scripts\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.920253 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-ovn-controller-tls-certs\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.920697 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d452a876-70e0-416c-ab4d-667b53e8f86e-scripts\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.921431 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c4d66436-88ae-4023-9601-bd2aa6954667-combined-ca-bundle\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.933657 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6jfn\" (UniqueName: \"kubernetes.io/projected/c4d66436-88ae-4023-9601-bd2aa6954667-kube-api-access-s6jfn\") pod \"ovn-controller-jhsjf\" (UID: \"c4d66436-88ae-4023-9601-bd2aa6954667\") " pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.935754 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv5qq\" (UniqueName: \"kubernetes.io/projected/d452a876-70e0-416c-ab4d-667b53e8f86e-kube-api-access-kv5qq\") pod \"ovn-controller-ovs-sct9d\" (UID: \"d452a876-70e0-416c-ab4d-667b53e8f86e\") " pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:32:56 crc kubenswrapper[4982]: I1205 19:32:56.996466 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf" Dec 05 19:32:57 crc kubenswrapper[4982]: I1205 19:32:57.038067 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.026802 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.028725 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.035900 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.066583 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.066989 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-5x865" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.068107 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.068336 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169604 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169633 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txvl4\" (UniqueName: \"kubernetes.io/projected/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-kube-api-access-txvl4\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169738 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169758 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169787 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.169810 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-config\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271637 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271711 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txvl4\" (UniqueName: \"kubernetes.io/projected/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-kube-api-access-txvl4\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271821 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271853 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271900 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.271925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-config\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.272001 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc 
kubenswrapper[4982]: I1205 19:33:00.273995 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.274683 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.274712 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/00d3c47eb97ed45af33414da23d844596e71c0fc579c010629fce4d5fc2d0d6c/globalmount\"" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.275699 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-config\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.276031 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.277070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.277253 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.279060 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.289979 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txvl4\" (UniqueName: \"kubernetes.io/projected/e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0-kube-api-access-txvl4\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.306215 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8a04aae7-79a8-493b-8c5e-833b5973b584\") pod \"ovsdbserver-sb-0\" (UID: \"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0\") " pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:00 crc kubenswrapper[4982]: I1205 19:33:00.390118 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:01 crc kubenswrapper[4982]: I1205 19:33:01.072830 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.433463 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.434036 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6sxn7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-hd4vz_openstack(01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.435690 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" 
podUID="01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6" Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.507246 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.507464 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gwsv5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-4wssq_openstack(fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:02 crc kubenswrapper[4982]: E1205 19:33:02.509308 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" podUID="fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48" Dec 05 19:33:02 crc kubenswrapper[4982]: I1205 19:33:02.854574 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.038721 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc\") pod \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.039050 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config\") pod \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.039300 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6sxn7\" (UniqueName: \"kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7\") pod \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\" (UID: \"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6\") " Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.039311 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6" (UID: "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.039531 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config" (OuterVolumeSpecName: "config") pod "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6" (UID: "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.040003 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.040198 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.045526 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7" (OuterVolumeSpecName: "kube-api-access-6sxn7") pod "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6" (UID: "01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6"). InnerVolumeSpecName "kube-api-access-6sxn7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.141897 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6sxn7\" (UniqueName: \"kubernetes.io/projected/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6-kube-api-access-6sxn7\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.466126 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" event={"ID":"01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6","Type":"ContainerDied","Data":"577c828cb672f9b7ce5ca1137083648228c88432c449d4dccd262b43308f8128"} Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.466158 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-hd4vz" Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.467160 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b6542514-631f-4a81-aba8-11cfebd33048","Type":"ContainerStarted","Data":"9fbba0ce1d769be79c18fd1ac08aa502d44914e774c076cf951a8953a271b102"} Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.532817 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.539927 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-hd4vz"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.599238 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.637954 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.678205 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.689392 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.697285 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.704328 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.711562 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.788506 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.793980 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.799067 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: W1205 19:33:03.824623 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod145ed592_ad9f_487f_940e_71b78c2f48e1.slice/crio-8d1c136a289e7af021d0626c8846095fb795aff874ece0f753d303cefc2aa483 WatchSource:0}: Error finding container 8d1c136a289e7af021d0626c8846095fb795aff874ece0f753d303cefc2aa483: Status 404 returned error can't find the container with id 
8d1c136a289e7af021d0626c8846095fb795aff874ece0f753d303cefc2aa483 Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.871835 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 19:33:03 crc kubenswrapper[4982]: I1205 19:33:03.883704 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.028168 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.029716 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.032756 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.033160 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.033322 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.033369 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-l4j9t" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.034698 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.057253 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.073508 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config\") pod \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.073698 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gwsv5\" (UniqueName: \"kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5\") pod \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\" (UID: \"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48\") " Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.074031 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config" (OuterVolumeSpecName: "config") pod "fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48" (UID: "fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.080870 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5" (OuterVolumeSpecName: "kube-api-access-gwsv5") pod "fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48" (UID: "fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48"). InnerVolumeSpecName "kube-api-access-gwsv5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.175647 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.176960 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177140 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177182 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mxgg\" (UniqueName: \"kubernetes.io/projected/97f36210-9f01-4ba6-95e4-0aea23aefbb3-kube-api-access-7mxgg\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177221 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gwsv5\" (UniqueName: \"kubernetes.io/projected/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-kube-api-access-gwsv5\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177233 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.177650 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.179548 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.179751 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.180456 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.198324 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.266455 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.269397 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.276092 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.276641 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285086 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8spr\" (UniqueName: \"kubernetes.io/projected/39dc27e3-5788-40fd-b186-9c91aa5618eb-kube-api-access-g8spr\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285132 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285299 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285514 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.285805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.319504 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.319740 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.319787 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.319791 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.320401 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.323267 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/97f36210-9f01-4ba6-95e4-0aea23aefbb3-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " 
pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.325663 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mxgg\" (UniqueName: \"kubernetes.io/projected/97f36210-9f01-4ba6-95e4-0aea23aefbb3-kube-api-access-7mxgg\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.325716 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.325766 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97f36210-9f01-4ba6-95e4-0aea23aefbb3-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.348020 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.357702 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mxgg\" (UniqueName: \"kubernetes.io/projected/97f36210-9f01-4ba6-95e4-0aea23aefbb3-kube-api-access-7mxgg\") pod \"cloudkitty-lokistack-distributor-664b687b54-xvg29\" (UID: \"97f36210-9f01-4ba6-95e4-0aea23aefbb3\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436361 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436541 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436600 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8spr\" (UniqueName: \"kubernetes.io/projected/39dc27e3-5788-40fd-b186-9c91aa5618eb-kube-api-access-g8spr\") pod 
\"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436803 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7w2r9\" (UniqueName: \"kubernetes.io/projected/4e468d19-dc16-452b-b3c8-cd5df67c4748-kube-api-access-7w2r9\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436870 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.436895 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.437097 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.437160 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.437197 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.444513 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.448668 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39dc27e3-5788-40fd-b186-9c91aa5618eb-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.448732 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.448754 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.451871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.452263 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.455249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/39dc27e3-5788-40fd-b186-9c91aa5618eb-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.463055 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.467638 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-t4scr" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.467738 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.467828 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.467848 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.468004 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.468173 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.468285 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.490820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" event={"ID":"fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48","Type":"ContainerDied","Data":"94ed5879cc1fb63a03304672f0e89cda74a1b87eca438628d4cbbdd31f5735fb"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.490957 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-4wssq" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.491710 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.494123 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.498422 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8spr\" (UniqueName: \"kubernetes.io/projected/39dc27e3-5788-40fd-b186-9c91aa5618eb-kube-api-access-g8spr\") pod \"cloudkitty-lokistack-querier-5467947bf7-zrl2q\" (UID: \"39dc27e3-5788-40fd-b186-9c91aa5618eb\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.501096 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" event={"ID":"cdd17104-44d2-452c-a6c2-50a2884a7a6f","Type":"ContainerStarted","Data":"b25ed1f22d6560ecb86e7e1c96ad6a1ce2f031127ea189c7710df1f6301751cd"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.507837 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerStarted","Data":"8d1c136a289e7af021d0626c8846095fb795aff874ece0f753d303cefc2aa483"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.515938 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.522287 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"34fad3bb-6720-4219-8862-08492842062a","Type":"ContainerStarted","Data":"7b6c122419c54452b6a13a4f40cc92a39a00e1f4fcb5f1e664334b6aa1d63b6c"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.527964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0","Type":"ContainerStarted","Data":"610fb7d4f12e5c8252c4f7ed0f952ed015bbcdaec1431d517b59a0ba21b913b9"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.534546 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.535174 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerStarted","Data":"a995524ac63ebe70eb959623614c175aff9662908b8bcf494be06f10171cfe25"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538559 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" event={"ID":"41538066-4378-432a-8e98-d3816216af50","Type":"ContainerStarted","Data":"065cc6bdb7cd26be86ad8a30d27c4d5a5315d1f7d9c6c612ecd1cd94b9bf8661"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538643 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7w2r9\" (UniqueName: \"kubernetes.io/projected/4e468d19-dc16-452b-b3c8-cd5df67c4748-kube-api-access-7w2r9\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538694 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538767 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.538860 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.539743 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.539885 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4e468d19-dc16-452b-b3c8-cd5df67c4748-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.543308 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.544669 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf" event={"ID":"c4d66436-88ae-4023-9601-bd2aa6954667","Type":"ContainerStarted","Data":"3fe39ab233917c93cf30c9be80960779622313caba7d1e655710d2f3de6bbff8"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.545433 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/4e468d19-dc16-452b-b3c8-cd5df67c4748-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.546496 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerStarted","Data":"f2399afd69ff0155a12e681dbd5083243a462ae4fef7c40d1deee94ce050725f"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.549620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4e3679d1-2b65-494e-bc5f-2a68697da816","Type":"ContainerStarted","Data":"1905f85782b8a4f40dbcc5403da9a04823f1539ce34b6b5942b66abc9c24cf0f"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.554316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af72a355-0521-4724-8224-c7fd9046b4d6","Type":"ContainerStarted","Data":"14f6c336b58e74c24ba362ebcf6be1db42a18ed7ba4ff56eac3087e1889b24a9"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.557048 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"700dc7aa-a441-4419-90c8-ff6ec6d31f23","Type":"ContainerStarted","Data":"09a48a7c542ae697d46ab1190e9c87f4958128b302da9da5d50b2febb74a597c"} Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.562987 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7w2r9\" (UniqueName: \"kubernetes.io/projected/4e468d19-dc16-452b-b3c8-cd5df67c4748-kube-api-access-7w2r9\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z\" (UID: \"4e468d19-dc16-452b-b3c8-cd5df67c4748\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.614142 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-sct9d"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.638719 4982 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.640043 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.640974 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641014 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641034 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641121 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641179 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsbw7\" (UniqueName: \"kubernetes.io/projected/25283efb-caa6-418a-8228-f3dcf1802be2-kube-api-access-lsbw7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641201 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmtr2\" (UniqueName: \"kubernetes.io/projected/b670abee-8f29-4979-8c53-5226b58a0141-kube-api-access-pmtr2\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641222 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641260 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641304 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641322 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641337 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641353 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641373 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641407 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.641445 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.653546 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.658255 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-4wssq"] Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742621 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742675 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsbw7\" (UniqueName: \"kubernetes.io/projected/25283efb-caa6-418a-8228-f3dcf1802be2-kube-api-access-lsbw7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742700 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmtr2\" (UniqueName: \"kubernetes.io/projected/b670abee-8f29-4979-8c53-5226b58a0141-kube-api-access-pmtr2\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742804 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742822 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742840 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742855 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742876 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: 
\"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742953 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.742987 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.743003 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.743022 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.743047 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.746683 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.747267 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.747461 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-ca-bundle\") pod 
\"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.748869 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.749224 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.749739 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.750514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.755718 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.756888 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.758734 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.761117 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.761443 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.761643 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.762468 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.763662 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/b670abee-8f29-4979-8c53-5226b58a0141-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.764013 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsbw7\" (UniqueName: \"kubernetes.io/projected/25283efb-caa6-418a-8228-f3dcf1802be2-kube-api-access-lsbw7\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.764301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/25283efb-caa6-418a-8228-f3dcf1802be2-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-jgdkk\" (UID: \"25283efb-caa6-418a-8228-f3dcf1802be2\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.766622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmtr2\" (UniqueName: \"kubernetes.io/projected/b670abee-8f29-4979-8c53-5226b58a0141-kube-api-access-pmtr2\") pod \"cloudkitty-lokistack-gateway-bc75944f-xcgh9\" (UID: \"b670abee-8f29-4979-8c53-5226b58a0141\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: W1205 19:33:04.779967 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd452a876_70e0_416c_ab4d_667b53e8f86e.slice/crio-f261a10437220ba7c896bf3cf2bc76c8dd44f79c2d69a785b17533fd28d858c0 WatchSource:0}: Error finding container f261a10437220ba7c896bf3cf2bc76c8dd44f79c2d69a785b17533fd28d858c0: Status 404 returned error can't find the container with id f261a10437220ba7c896bf3cf2bc76c8dd44f79c2d69a785b17533fd28d858c0 Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 
19:33:04.823317 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.842016 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:04 crc kubenswrapper[4982]: I1205 19:33:04.899586 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 19:33:04 crc kubenswrapper[4982]: W1205 19:33:04.976781 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d0f7663_0afb_41dc_bae8_7efdafbf2ed2.slice/crio-c4951cca9f72486aa305682a2d8950b201164d785fc8cf5a9d983da37e7f5a86 WatchSource:0}: Error finding container c4951cca9f72486aa305682a2d8950b201164d785fc8cf5a9d983da37e7f5a86: Status 404 returned error can't find the container with id c4951cca9f72486aa305682a2d8950b201164d785fc8cf5a9d983da37e7f5a86 Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.185838 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.187291 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.191102 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.191205 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.209297 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.260703 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.262163 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.269808 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.270355 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.277326 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.335673 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.337031 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.341927 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.341947 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.346139 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357365 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357420 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357454 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcxwz\" (UniqueName: \"kubernetes.io/projected/85a8d068-da43-4ed2-879a-281872eab097-kube-api-access-wcxwz\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357490 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357519 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357555 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357593 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " 
pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357625 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357675 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357697 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.357811 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4hhj\" (UniqueName: \"kubernetes.io/projected/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-kube-api-access-k4hhj\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.358062 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.358084 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.415679 4982 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6" path="/var/lib/kubelet/pods/01e8dd13-c3bc-4521-bc5b-d798f1fe7ce6/volumes" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.416246 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48" path="/var/lib/kubelet/pods/fc04c3f9-68d7-4513-9a8f-ce3c1c9bee48/volumes" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcxwz\" (UniqueName: \"kubernetes.io/projected/85a8d068-da43-4ed2-879a-281872eab097-kube-api-access-wcxwz\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471571 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471609 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471680 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rqp6\" (UniqueName: \"kubernetes.io/projected/8c96cc97-f375-489f-9168-bac3695b309a-kube-api-access-8rqp6\") pod 
\"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471712 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471762 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.471784 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.473632 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.474686 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475237 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475290 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475342 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475406 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475432 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475459 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475520 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4hhj\" (UniqueName: \"kubernetes.io/projected/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-kube-api-access-k4hhj\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475595 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475620 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475650 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475721 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.475892 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.477013 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.477455 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.483718 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.485470 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.489811 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/85a8d068-da43-4ed2-879a-281872eab097-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.491406 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a8d068-da43-4ed2-879a-281872eab097-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.503943 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.506451 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.507471 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4hhj\" (UniqueName: \"kubernetes.io/projected/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-kube-api-access-k4hhj\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.515858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcxwz\" (UniqueName: \"kubernetes.io/projected/85a8d068-da43-4ed2-879a-281872eab097-kube-api-access-wcxwz\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.522623 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/892ec5b5-4495-4ef9-ae57-7e3c535e11ca-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.539575 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"892ec5b5-4495-4ef9-ae57-7e3c535e11ca\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.551965 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.552565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"85a8d068-da43-4ed2-879a-281872eab097\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581195 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rqp6\" (UniqueName: \"kubernetes.io/projected/8c96cc97-f375-489f-9168-bac3695b309a-kube-api-access-8rqp6\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581323 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581358 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581384 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.581510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.583812 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.584798 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.585608 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.586388 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-http\") pod 
\"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.590762 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.600192 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.605890 4982 generic.go:334] "Generic (PLEG): container finished" podID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerID="71e7c69443870c6e81c1f681e2fddcc1a8b84e1a092737e46c5f144d94e1c0c1" exitCode=0 Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.606001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" event={"ID":"cdd17104-44d2-452c-a6c2-50a2884a7a6f","Type":"ContainerDied","Data":"71e7c69443870c6e81c1f681e2fddcc1a8b84e1a092737e46c5f144d94e1c0c1"} Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.607971 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2","Type":"ContainerStarted","Data":"c4951cca9f72486aa305682a2d8950b201164d785fc8cf5a9d983da37e7f5a86"} Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.609692 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/8c96cc97-f375-489f-9168-bac3695b309a-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.621616 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-sct9d" event={"ID":"d452a876-70e0-416c-ab4d-667b53e8f86e","Type":"ContainerStarted","Data":"f261a10437220ba7c896bf3cf2bc76c8dd44f79c2d69a785b17533fd28d858c0"} Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.632979 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.642215 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rqp6\" (UniqueName: \"kubernetes.io/projected/8c96cc97-f375-489f-9168-bac3695b309a-kube-api-access-8rqp6\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"8c96cc97-f375-489f-9168-bac3695b309a\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.652257 4982 generic.go:334] "Generic (PLEG): container finished" podID="41538066-4378-432a-8e98-d3816216af50" containerID="2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35" exitCode=0 Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.652299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" event={"ID":"41538066-4378-432a-8e98-d3816216af50","Type":"ContainerDied","Data":"2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35"} Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.658533 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.708682 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.717967 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.729193 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.735230 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9"] Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.806542 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:05 crc kubenswrapper[4982]: I1205 19:33:05.876715 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk"] Dec 05 19:33:06 crc kubenswrapper[4982]: W1205 19:33:06.525560 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod25283efb_caa6_418a_8228_f3dcf1802be2.slice/crio-309e0f2e663f5c10597d232707b34f61165cb22be7be6daf851ae51ffd8df36a WatchSource:0}: Error finding container 309e0f2e663f5c10597d232707b34f61165cb22be7be6daf851ae51ffd8df36a: Status 404 returned error can't find the container with id 309e0f2e663f5c10597d232707b34f61165cb22be7be6daf851ae51ffd8df36a Dec 05 19:33:06 crc kubenswrapper[4982]: W1205 19:33:06.527689 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e468d19_dc16_452b_b3c8_cd5df67c4748.slice/crio-8c067770b5481286181ddf4fba1eb60b77e67b9956d01aa3f86c1f5e385a30bd WatchSource:0}: Error finding container 8c067770b5481286181ddf4fba1eb60b77e67b9956d01aa3f86c1f5e385a30bd: Status 404 returned error can't find the container with id 8c067770b5481286181ddf4fba1eb60b77e67b9956d01aa3f86c1f5e385a30bd Dec 05 19:33:06 crc kubenswrapper[4982]: I1205 19:33:06.664848 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" event={"ID":"4e468d19-dc16-452b-b3c8-cd5df67c4748","Type":"ContainerStarted","Data":"8c067770b5481286181ddf4fba1eb60b77e67b9956d01aa3f86c1f5e385a30bd"} Dec 05 19:33:06 crc kubenswrapper[4982]: I1205 19:33:06.666901 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" event={"ID":"25283efb-caa6-418a-8228-f3dcf1802be2","Type":"ContainerStarted","Data":"309e0f2e663f5c10597d232707b34f61165cb22be7be6daf851ae51ffd8df36a"} Dec 05 19:33:07 crc kubenswrapper[4982]: W1205 19:33:07.725026 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97f36210_9f01_4ba6_95e4_0aea23aefbb3.slice/crio-8bc984549379f4d9182ae9b538b763d940e2a2eba41f39f695d2934280d11588 
WatchSource:0}: Error finding container 8bc984549379f4d9182ae9b538b763d940e2a2eba41f39f695d2934280d11588: Status 404 returned error can't find the container with id 8bc984549379f4d9182ae9b538b763d940e2a2eba41f39f695d2934280d11588 Dec 05 19:33:07 crc kubenswrapper[4982]: W1205 19:33:07.727391 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39dc27e3_5788_40fd_b186_9c91aa5618eb.slice/crio-a9de9dc3bf4f28687637184751c3f1a7103e85d241120a934de05da3288a3519 WatchSource:0}: Error finding container a9de9dc3bf4f28687637184751c3f1a7103e85d241120a934de05da3288a3519: Status 404 returned error can't find the container with id a9de9dc3bf4f28687637184751c3f1a7103e85d241120a934de05da3288a3519 Dec 05 19:33:07 crc kubenswrapper[4982]: W1205 19:33:07.730969 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb670abee_8f29_4979_8c53_5226b58a0141.slice/crio-f1575c3db5d94a5db3744975c93a16ed29a2bff1df33dea69cc7abce5260d311 WatchSource:0}: Error finding container f1575c3db5d94a5db3744975c93a16ed29a2bff1df33dea69cc7abce5260d311: Status 404 returned error can't find the container with id f1575c3db5d94a5db3744975c93a16ed29a2bff1df33dea69cc7abce5260d311 Dec 05 19:33:08 crc kubenswrapper[4982]: I1205 19:33:08.692323 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" event={"ID":"b670abee-8f29-4979-8c53-5226b58a0141","Type":"ContainerStarted","Data":"f1575c3db5d94a5db3744975c93a16ed29a2bff1df33dea69cc7abce5260d311"} Dec 05 19:33:08 crc kubenswrapper[4982]: I1205 19:33:08.694177 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" event={"ID":"97f36210-9f01-4ba6-95e4-0aea23aefbb3","Type":"ContainerStarted","Data":"8bc984549379f4d9182ae9b538b763d940e2a2eba41f39f695d2934280d11588"} Dec 05 19:33:08 crc kubenswrapper[4982]: I1205 19:33:08.695931 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" event={"ID":"39dc27e3-5788-40fd-b186-9c91aa5618eb","Type":"ContainerStarted","Data":"a9de9dc3bf4f28687637184751c3f1a7103e85d241120a934de05da3288a3519"} Dec 05 19:33:12 crc kubenswrapper[4982]: I1205 19:33:12.557787 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:33:12 crc kubenswrapper[4982]: I1205 19:33:12.558705 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.099075 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.099695 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-znbs4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(af72a355-0521-4724-8224-c7fd9046b4d6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.100795 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="af72a355-0521-4724-8224-c7fd9046b4d6" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.136390 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.136550 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 
30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-56k8j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(2c4d593c-5baa-4b09-b586-7b0e65acaa73): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.137639 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.795475 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" Dec 05 19:33:18 crc kubenswrapper[4982]: E1205 19:33:18.796738 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="af72a355-0521-4724-8224-c7fd9046b4d6" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.378638 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context 
canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.378780 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cbh589hd6h56fh5ddh5d5h89h56dh587h5b5h547hc7hc9h555h65dh5d4h699h55ch56dh56h588h5cchc6hf5h5dfh98h688h5dch5fch6dhddh568q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kv5qq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-sct9d_openstack(d452a876-70e0-416c-ab4d-667b53e8f86e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.380027 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-ovs-sct9d" podUID="d452a876-70e0-416c-ab4d-667b53e8f86e" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.396488 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.396613 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f68xv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(b6542514-631f-4a81-aba8-11cfebd33048): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.398372 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="b6542514-631f-4a81-aba8-11cfebd33048" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.802495 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-sct9d" podUID="d452a876-70e0-416c-ab4d-667b53e8f86e" Dec 05 19:33:19 crc kubenswrapper[4982]: E1205 19:33:19.802861 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="b6542514-631f-4a81-aba8-11cfebd33048" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.128698 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-df27x"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.130166 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.136181 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.145756 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-df27x"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270555 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovs-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270618 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwnw2\" (UniqueName: \"kubernetes.io/projected/c19a072d-5061-4c2e-a817-8708ec746095-kube-api-access-lwnw2\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270658 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovn-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270672 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-combined-ca-bundle\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.270688 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c19a072d-5061-4c2e-a817-8708ec746095-config\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.280850 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.322406 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.323848 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.327361 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.332388 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374349 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovs-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374467 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwnw2\" (UniqueName: \"kubernetes.io/projected/c19a072d-5061-4c2e-a817-8708ec746095-kube-api-access-lwnw2\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374515 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovn-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374535 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-combined-ca-bundle\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c19a072d-5061-4c2e-a817-8708ec746095-config\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.374613 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.375015 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovs-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.375012 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/c19a072d-5061-4c2e-a817-8708ec746095-ovn-rundir\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.383120 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c19a072d-5061-4c2e-a817-8708ec746095-config\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.385343 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-combined-ca-bundle\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.398090 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c19a072d-5061-4c2e-a817-8708ec746095-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.400716 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwnw2\" (UniqueName: \"kubernetes.io/projected/c19a072d-5061-4c2e-a817-8708ec746095-kube-api-access-lwnw2\") pod \"ovn-controller-metrics-df27x\" (UID: \"c19a072d-5061-4c2e-a817-8708ec746095\") " pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.430344 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.456663 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.458361 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.463084 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.464031 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-df27x" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.475902 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.475993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.476024 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmqrm\" (UniqueName: \"kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.476048 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.476995 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.578378 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579199 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmqrm\" (UniqueName: \"kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579262 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579481 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579561 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579615 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579666 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx5kt\" (UniqueName: \"kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579692 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.579757 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.580134 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.580665 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.596579 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmqrm\" (UniqueName: \"kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm\") pod \"dnsmasq-dns-5bf47b49b7-h8h59\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.674048 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.681196 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.681294 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.681355 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx5kt\" (UniqueName: \"kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.681374 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.681402 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.682557 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.683162 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.683651 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.684440 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 
19:33:20.699616 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx5kt\" (UniqueName: \"kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt\") pod \"dnsmasq-dns-8554648995-sjz6f\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:20 crc kubenswrapper[4982]: I1205 19:33:20.779423 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:22 crc kubenswrapper[4982]: I1205 19:33:22.452677 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.559628 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified" Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.559862 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key --ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cbh589hd6h56fh5ddh5d5h89h56dh587h5b5h547hc7hc9h555h65dh5d4h699h55ch56dh56h588h5cchc6hf5h5dfh98h688h5dch5fch6dhddh568q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s6jfn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{
Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-jhsjf_openstack(c4d66436-88ae-4023-9601-bd2aa6954667): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.561260 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-jhsjf" podUID="c4d66436-88ae-4023-9601-bd2aa6954667" Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.851166 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.851641 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h699hb4h5b8h5c9h5c6h588hfh65h7ch589h5bbh66fh5hdh578h567h7fh65chb9h5bbh6dh654h5cdh598h9fh646h655hb5hb8h67h68dq,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-txvl4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
ovsdbserver-sb-0_openstack(e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:33:22 crc kubenswrapper[4982]: E1205 19:33:22.877867 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified\\\"\"" pod="openstack/ovn-controller-jhsjf" podUID="c4d66436-88ae-4023-9601-bd2aa6954667" Dec 05 19:33:22 crc kubenswrapper[4982]: W1205 19:33:22.912055 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod892ec5b5_4495_4ef9_ae57_7e3c535e11ca.slice/crio-252057b32831b1056c65b047df370657f88298486db12dcefc6b02f597ea5c20 WatchSource:0}: Error finding container 252057b32831b1056c65b047df370657f88298486db12dcefc6b02f597ea5c20: Status 404 returned error can't find the container with id 252057b32831b1056c65b047df370657f88298486db12dcefc6b02f597ea5c20 Dec 05 19:33:23 crc kubenswrapper[4982]: I1205 19:33:23.327897 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 05 19:33:23 crc kubenswrapper[4982]: I1205 19:33:23.463533 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.612615 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.620201 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-df27x"] Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.769608 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.876501 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"85a8d068-da43-4ed2-879a-281872eab097","Type":"ContainerStarted","Data":"be7a27e5ae88df9258ffde4b407ddc1bf20c51c21ff2ee1771854fd7e3f26bb8"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.877597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"892ec5b5-4495-4ef9-ae57-7e3c535e11ca","Type":"ContainerStarted","Data":"252057b32831b1056c65b047df370657f88298486db12dcefc6b02f597ea5c20"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.880391 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" event={"ID":"41538066-4378-432a-8e98-d3816216af50","Type":"ContainerStarted","Data":"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.880559 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.880565 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" podUID="41538066-4378-432a-8e98-d3816216af50" containerName="dnsmasq-dns" containerID="cri-o://6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8" gracePeriod=10 Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:23.901727 4982 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" podStartSLOduration=37.42006454 podStartE2EDuration="37.901710921s" podCreationTimestamp="2025-12-05 19:32:46 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.756304326 +0000 UTC m=+1162.638190321" lastFinishedPulling="2025-12-05 19:33:04.237950707 +0000 UTC m=+1163.119836702" observedRunningTime="2025-12-05 19:33:23.895238269 +0000 UTC m=+1182.777124264" watchObservedRunningTime="2025-12-05 19:33:23.901710921 +0000 UTC m=+1182.783596916" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.102742 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.102797 4982 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.102923 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-h2h9b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(700dc7aa-a441-4419-90c8-ff6ec6d31f23): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.104209 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.665729 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.793671 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config\") pod \"41538066-4378-432a-8e98-d3816216af50\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.793811 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc\") pod \"41538066-4378-432a-8e98-d3816216af50\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.793936 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82jr4\" (UniqueName: \"kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4\") pod \"41538066-4378-432a-8e98-d3816216af50\" (UID: \"41538066-4378-432a-8e98-d3816216af50\") " Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.890620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df27x" event={"ID":"c19a072d-5061-4c2e-a817-8708ec746095","Type":"ContainerStarted","Data":"43a9947b4fe7fa2b23e5f5275dc4ae12ceea42a65134f2f76319ca95ee552795"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.891677 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerStarted","Data":"aa6b5ceea64582a55ae12f63e7bb030e395a9636bcb49bf4c06727e743697eca"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.892477 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerStarted","Data":"1745cb7e2ffaee027cb1565aa3fe7f4c279c4dedf7cc940c5bc99f13b0d81b30"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.894088 4982 generic.go:334] "Generic (PLEG): container finished" podID="41538066-4378-432a-8e98-d3816216af50" containerID="6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8" exitCode=0 Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.894122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" event={"ID":"41538066-4378-432a-8e98-d3816216af50","Type":"ContainerDied","Data":"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.894222 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" event={"ID":"41538066-4378-432a-8e98-d3816216af50","Type":"ContainerDied","Data":"065cc6bdb7cd26be86ad8a30d27c4d5a5315d1f7d9c6c612ecd1cd94b9bf8661"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.894221 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-vfqzw" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.894242 4982 scope.go:117] "RemoveContainer" containerID="6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.895702 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"8c96cc97-f375-489f-9168-bac3695b309a","Type":"ContainerStarted","Data":"3bfc84bcb31329b0e598fd78681636ff374e536b26a6928ffaaf66ebafea54e8"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.895738 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"8c96cc97-f375-489f-9168-bac3695b309a","Type":"ContainerStarted","Data":"34e1906e4535fc8aacb837a65071edaf394c400d2823ee96b30e8fe5fc4ea6d7"} Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.895815 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.897037 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.919131 4982 scope.go:117] "RemoveContainer" containerID="2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.929435 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=20.92941263 podStartE2EDuration="20.92941263s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:24.922570029 +0000 UTC m=+1183.804456034" watchObservedRunningTime="2025-12-05 19:33:24.92941263 +0000 UTC m=+1183.811298645" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.938463 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4" (OuterVolumeSpecName: "kube-api-access-82jr4") pod "41538066-4378-432a-8e98-d3816216af50" (UID: "41538066-4378-432a-8e98-d3816216af50"). InnerVolumeSpecName "kube-api-access-82jr4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.950913 4982 scope.go:117] "RemoveContainer" containerID="6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.951332 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8\": container with ID starting with 6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8 not found: ID does not exist" containerID="6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.951402 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8"} err="failed to get container status \"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8\": rpc error: code = NotFound desc = could not find container \"6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8\": container with ID starting with 6cf1811e6e6614a0e39d90e7c6ac43899d9297e41ad209208b0d2561bfc0cfe8 not found: ID does not exist" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.951433 4982 scope.go:117] "RemoveContainer" containerID="2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35" Dec 05 19:33:24 crc kubenswrapper[4982]: E1205 19:33:24.952328 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35\": container with ID starting with 2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35 not found: ID does not exist" containerID="2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.952371 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35"} err="failed to get container status \"2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35\": rpc error: code = NotFound desc = could not find container \"2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35\": container with ID starting with 2f1c9339f4acc05c86039dd686d9de957d9dcb4821a6e1731cf07387dcf4de35 not found: ID does not exist" Dec 05 19:33:24 crc kubenswrapper[4982]: I1205 19:33:24.997194 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82jr4\" (UniqueName: \"kubernetes.io/projected/41538066-4378-432a-8e98-d3816216af50-kube-api-access-82jr4\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.109101 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config" (OuterVolumeSpecName: "config") pod "41538066-4378-432a-8e98-d3816216af50" (UID: "41538066-4378-432a-8e98-d3816216af50"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.199611 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.903538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" event={"ID":"4e468d19-dc16-452b-b3c8-cd5df67c4748","Type":"ContainerStarted","Data":"bc35069f1b900b32808eeb25c067c9a9bdb746def6301c79867ed2074d419b3a"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.903939 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.905586 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" event={"ID":"cdd17104-44d2-452c-a6c2-50a2884a7a6f","Type":"ContainerStarted","Data":"26dacd4cff0c74b38dfca03884d7a32b9dad6def60a2eeaf4acc7465d3cee48d"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.905643 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="dnsmasq-dns" containerID="cri-o://26dacd4cff0c74b38dfca03884d7a32b9dad6def60a2eeaf4acc7465d3cee48d" gracePeriod=10 Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.905654 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.909729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"34fad3bb-6720-4219-8862-08492842062a","Type":"ContainerStarted","Data":"53ccf4f28a067c0623aa36fa8181d5a7fdf8879620396c6ba237118bfa9cb079"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.909846 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.913092 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" event={"ID":"25283efb-caa6-418a-8228-f3dcf1802be2","Type":"ContainerStarted","Data":"e28d675cd9940e169edc14e0c1fa69d854dafb9695293fd63c93a907574aacce"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.916185 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"85a8d068-da43-4ed2-879a-281872eab097","Type":"ContainerStarted","Data":"b7bc623f8d5a646b182c4d0cdf3cda8e182a6a34e2ed8eaa88e0a7f8b7fe5fc0"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.916299 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.918090 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" event={"ID":"97f36210-9f01-4ba6-95e4-0aea23aefbb3","Type":"ContainerStarted","Data":"cc3a639e9651d15cef075b046bd4caded3338d214b6f68937fd9a38e46a7d7d6"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.918123 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:25 crc 
kubenswrapper[4982]: I1205 19:33:25.919921 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2","Type":"ContainerStarted","Data":"7152174e8e7f7210bae34ec0858afd315ff7f4f848806e8e92ca74e165802c96"} Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.931847 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" podStartSLOduration=5.473127171 podStartE2EDuration="21.931829187s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="2025-12-05 19:33:06.530587244 +0000 UTC m=+1165.412473239" lastFinishedPulling="2025-12-05 19:33:22.98928927 +0000 UTC m=+1181.871175255" observedRunningTime="2025-12-05 19:33:25.921959141 +0000 UTC m=+1184.803845136" watchObservedRunningTime="2025-12-05 19:33:25.931829187 +0000 UTC m=+1184.813715182" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.946092 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=21.946071213 podStartE2EDuration="21.946071213s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:25.937885298 +0000 UTC m=+1184.819771293" watchObservedRunningTime="2025-12-05 19:33:25.946071213 +0000 UTC m=+1184.827957208" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.962485 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" podStartSLOduration=6.540867551 podStartE2EDuration="21.962449102s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="2025-12-05 19:33:07.728263106 +0000 UTC m=+1166.610149111" lastFinishedPulling="2025-12-05 19:33:23.149844667 +0000 UTC m=+1182.031730662" observedRunningTime="2025-12-05 19:33:25.955620141 +0000 UTC m=+1184.837506136" watchObservedRunningTime="2025-12-05 19:33:25.962449102 +0000 UTC m=+1184.844335097" Dec 05 19:33:25 crc kubenswrapper[4982]: I1205 19:33:25.978389 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" podStartSLOduration=5.364711616 podStartE2EDuration="21.978369759s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="2025-12-05 19:33:06.530050841 +0000 UTC m=+1165.411936826" lastFinishedPulling="2025-12-05 19:33:23.143708974 +0000 UTC m=+1182.025594969" observedRunningTime="2025-12-05 19:33:25.976532583 +0000 UTC m=+1184.858418578" watchObservedRunningTime="2025-12-05 19:33:25.978369759 +0000 UTC m=+1184.860255754" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.018453 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=17.489193819 podStartE2EDuration="36.018438399s" podCreationTimestamp="2025-12-05 19:32:50 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.759814694 +0000 UTC m=+1162.641700689" lastFinishedPulling="2025-12-05 19:33:22.289059264 +0000 UTC m=+1181.170945269" observedRunningTime="2025-12-05 19:33:25.988395519 +0000 UTC m=+1184.870281514" watchObservedRunningTime="2025-12-05 19:33:26.018438399 +0000 UTC m=+1184.900324394" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.031059 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" podStartSLOduration=40.420217969 podStartE2EDuration="41.031038953s" podCreationTimestamp="2025-12-05 19:32:45 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.781634759 +0000 UTC m=+1162.663520754" lastFinishedPulling="2025-12-05 19:33:04.392455743 +0000 UTC m=+1163.274341738" observedRunningTime="2025-12-05 19:33:26.009689821 +0000 UTC m=+1184.891575816" watchObservedRunningTime="2025-12-05 19:33:26.031038953 +0000 UTC m=+1184.912924968" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.256723 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "41538066-4378-432a-8e98-d3816216af50" (UID: "41538066-4378-432a-8e98-d3816216af50"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.326751 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/41538066-4378-432a-8e98-d3816216af50-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.440131 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.448617 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-vfqzw"] Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.934594 4982 generic.go:334] "Generic (PLEG): container finished" podID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerID="26dacd4cff0c74b38dfca03884d7a32b9dad6def60a2eeaf4acc7465d3cee48d" exitCode=0 Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.934745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" event={"ID":"cdd17104-44d2-452c-a6c2-50a2884a7a6f","Type":"ContainerDied","Data":"26dacd4cff0c74b38dfca03884d7a32b9dad6def60a2eeaf4acc7465d3cee48d"} Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.935754 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:26 crc kubenswrapper[4982]: I1205 19:33:26.951842 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-jgdkk" Dec 05 19:33:27 crc kubenswrapper[4982]: I1205 19:33:27.404000 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41538066-4378-432a-8e98-d3816216af50" path="/var/lib/kubelet/pods/41538066-4378-432a-8e98-d3816216af50/volumes" Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.962775 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" event={"ID":"b670abee-8f29-4979-8c53-5226b58a0141","Type":"ContainerStarted","Data":"be74fd3cb80513c60cd0b3a8c40588f0e64900ad8529fe3d393fa935994c64fa"} Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.964126 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerStarted","Data":"0c3042f205d62fc88d754be3aa7a85469bebd43fa7de42ab1e7b09cced42c1da"} Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.965417 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" 
event={"ID":"892ec5b5-4495-4ef9-ae57-7e3c535e11ca","Type":"ContainerStarted","Data":"001b72913b982c458b2036e2aa62b0a2c4b6c9c7df0696c6763ef35bfef34893"} Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.968647 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerStarted","Data":"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d"} Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.970050 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerStarted","Data":"e2b5d627abb81fcf217497bed705f68b49a9e541aa87fc9be4ce37ceb9eae824"} Dec 05 19:33:28 crc kubenswrapper[4982]: I1205 19:33:28.972545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" event={"ID":"39dc27e3-5788-40fd-b186-9c91aa5618eb","Type":"ContainerStarted","Data":"402d6dc80dabaf03c19533929d0c45ce1e2c983da0ba1fea643c4351a1b33eb8"} Dec 05 19:33:29 crc kubenswrapper[4982]: I1205 19:33:29.987678 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerStarted","Data":"25bd919fb6d1b5d277f9956e642210082f9f9a7875b1f375e7baa0bd19ce6bf9"} Dec 05 19:33:29 crc kubenswrapper[4982]: I1205 19:33:29.992128 4982 generic.go:334] "Generic (PLEG): container finished" podID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerID="0c3042f205d62fc88d754be3aa7a85469bebd43fa7de42ab1e7b09cced42c1da" exitCode=0 Dec 05 19:33:29 crc kubenswrapper[4982]: I1205 19:33:29.992239 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerDied","Data":"0c3042f205d62fc88d754be3aa7a85469bebd43fa7de42ab1e7b09cced42c1da"} Dec 05 19:33:29 crc kubenswrapper[4982]: I1205 19:33:29.994281 4982 generic.go:334] "Generic (PLEG): container finished" podID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerID="479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d" exitCode=0 Dec 05 19:33:29 crc kubenswrapper[4982]: I1205 19:33:29.994842 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerDied","Data":"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d"} Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.000783 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4e3679d1-2b65-494e-bc5f-2a68697da816","Type":"ContainerStarted","Data":"a978109b9ff47362b3a276bade23a2bb0ceab312da83494323a53354e5a607de"} Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.003895 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.004695 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.032542 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.043883 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-xcgh9" podStartSLOduration=10.789002763 podStartE2EDuration="26.043865623s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="2025-12-05 19:33:07.734251335 +0000 UTC m=+1166.616137350" lastFinishedPulling="2025-12-05 19:33:22.989114215 +0000 UTC m=+1181.871000210" observedRunningTime="2025-12-05 19:33:30.035592216 +0000 UTC m=+1188.917478251" watchObservedRunningTime="2025-12-05 19:33:30.043865623 +0000 UTC m=+1188.925751618" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.116306 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" podStartSLOduration=10.417605903 podStartE2EDuration="26.115782128s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="2025-12-05 19:33:07.730483391 +0000 UTC m=+1166.612369406" lastFinishedPulling="2025-12-05 19:33:23.428659636 +0000 UTC m=+1182.310545631" observedRunningTime="2025-12-05 19:33:30.107658605 +0000 UTC m=+1188.989544620" watchObservedRunningTime="2025-12-05 19:33:30.115782128 +0000 UTC m=+1188.997668123" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.141071 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=26.141054208 podStartE2EDuration="26.141054208s" podCreationTimestamp="2025-12-05 19:33:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:30.136478214 +0000 UTC m=+1189.018364209" watchObservedRunningTime="2025-12-05 19:33:30.141054208 +0000 UTC m=+1189.022940203" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.575286 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.756711 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.916298 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc\") pod \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.916668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlffv\" (UniqueName: \"kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv\") pod \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.916735 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config\") pod \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\" (UID: \"cdd17104-44d2-452c-a6c2-50a2884a7a6f\") " Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.921306 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv" (OuterVolumeSpecName: "kube-api-access-mlffv") pod "cdd17104-44d2-452c-a6c2-50a2884a7a6f" (UID: "cdd17104-44d2-452c-a6c2-50a2884a7a6f"). InnerVolumeSpecName "kube-api-access-mlffv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.973095 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cdd17104-44d2-452c-a6c2-50a2884a7a6f" (UID: "cdd17104-44d2-452c-a6c2-50a2884a7a6f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:30 crc kubenswrapper[4982]: I1205 19:33:30.983653 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config" (OuterVolumeSpecName: "config") pod "cdd17104-44d2-452c-a6c2-50a2884a7a6f" (UID: "cdd17104-44d2-452c-a6c2-50a2884a7a6f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.010799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" event={"ID":"cdd17104-44d2-452c-a6c2-50a2884a7a6f","Type":"ContainerDied","Data":"b25ed1f22d6560ecb86e7e1c96ad6a1ce2f031127ea189c7710df1f6301751cd"} Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.011881 4982 scope.go:117] "RemoveContainer" containerID="26dacd4cff0c74b38dfca03884d7a32b9dad6def60a2eeaf4acc7465d3cee48d" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.012139 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-ft2hj" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.018464 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.018490 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlffv\" (UniqueName: \"kubernetes.io/projected/cdd17104-44d2-452c-a6c2-50a2884a7a6f-kube-api-access-mlffv\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.018503 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdd17104-44d2-452c-a6c2-50a2884a7a6f-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.035875 4982 scope.go:117] "RemoveContainer" containerID="71e7c69443870c6e81c1f681e2fddcc1a8b84e1a092737e46c5f144d94e1c0c1" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.072887 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.082253 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-ft2hj"] Dec 05 19:33:31 crc kubenswrapper[4982]: E1205 19:33:31.132656 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0" Dec 05 19:33:31 crc kubenswrapper[4982]: I1205 19:33:31.399069 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" path="/var/lib/kubelet/pods/cdd17104-44d2-452c-a6c2-50a2884a7a6f/volumes" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.021812 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerStarted","Data":"93e9fb19dd6b836d578e9cdd086d7388d999d3f87b3bf154f0b0089e39d5f1a4"} Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.023679 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.024014 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0","Type":"ContainerStarted","Data":"2fa2446c7eda7749ff448b2fe3ce02653f8dfe2e01e6189b512fda4d435064fe"} Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.025879 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"7d0f7663-0afb-41dc-bae8-7efdafbf2ed2","Type":"ContainerStarted","Data":"729793a0b9e14fb061c13feb50755958c627eac0fa575f6caaba292d778d217f"} Dec 05 19:33:32 crc kubenswrapper[4982]: E1205 19:33:32.026974 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.029737 4982 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerStarted","Data":"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209"} Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.030334 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.035787 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-df27x" event={"ID":"c19a072d-5061-4c2e-a817-8708ec746095","Type":"ContainerStarted","Data":"111ca70d5fada33c25e643e700938eb760b3c7b7353af66939048d8dfbe3aa72"} Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.044102 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-sjz6f" podStartSLOduration=12.044088302 podStartE2EDuration="12.044088302s" podCreationTimestamp="2025-12-05 19:33:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:32.042572934 +0000 UTC m=+1190.924458929" watchObservedRunningTime="2025-12-05 19:33:32.044088302 +0000 UTC m=+1190.925974297" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.061603 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-df27x" podStartSLOduration=5.44551549 podStartE2EDuration="12.061588039s" podCreationTimestamp="2025-12-05 19:33:20 +0000 UTC" firstStartedPulling="2025-12-05 19:33:24.13769276 +0000 UTC m=+1183.019578765" lastFinishedPulling="2025-12-05 19:33:30.753765319 +0000 UTC m=+1189.635651314" observedRunningTime="2025-12-05 19:33:32.058666016 +0000 UTC m=+1190.940552031" watchObservedRunningTime="2025-12-05 19:33:32.061588039 +0000 UTC m=+1190.943474034" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.152413 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" podStartSLOduration=12.152391265 podStartE2EDuration="12.152391265s" podCreationTimestamp="2025-12-05 19:33:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:32.143969375 +0000 UTC m=+1191.025855380" watchObservedRunningTime="2025-12-05 19:33:32.152391265 +0000 UTC m=+1191.034277260" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.153295 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=12.38172683 podStartE2EDuration="38.153289277s" podCreationTimestamp="2025-12-05 19:32:54 +0000 UTC" firstStartedPulling="2025-12-05 19:33:04.982320975 +0000 UTC m=+1163.864206970" lastFinishedPulling="2025-12-05 19:33:30.753883422 +0000 UTC m=+1189.635769417" observedRunningTime="2025-12-05 19:33:32.1064945 +0000 UTC m=+1190.988380495" watchObservedRunningTime="2025-12-05 19:33:32.153289277 +0000 UTC m=+1191.035175272" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.246601 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.281433 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:33:32 crc kubenswrapper[4982]: E1205 19:33:32.281857 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="41538066-4378-432a-8e98-d3816216af50" containerName="init" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.281882 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41538066-4378-432a-8e98-d3816216af50" containerName="init" Dec 05 19:33:32 crc kubenswrapper[4982]: E1205 19:33:32.281892 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="init" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.281903 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="init" Dec 05 19:33:32 crc kubenswrapper[4982]: E1205 19:33:32.281926 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.281934 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: E1205 19:33:32.281954 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41538066-4378-432a-8e98-d3816216af50" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.281961 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41538066-4378-432a-8e98-d3816216af50" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.282179 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd17104-44d2-452c-a6c2-50a2884a7a6f" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.286554 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="41538066-4378-432a-8e98-d3816216af50" containerName="dnsmasq-dns" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.287941 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.290203 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.320081 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.345415 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch7xw\" (UniqueName: \"kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.345507 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.345571 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.345603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.345803 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.373288 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.449246 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.449323 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.449413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.449440 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch7xw\" (UniqueName: \"kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.449530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.453727 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.453841 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.454081 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.454097 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.477132 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch7xw\" (UniqueName: \"kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw\") pod \"dnsmasq-dns-b8fbc5445-zfdp6\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:32 crc kubenswrapper[4982]: I1205 19:33:32.613801 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.043588 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.091486 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.209914 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.472271 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.546419 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.546587 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.550446 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.550498 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.550707 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.550738 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-t797b" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.674736 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-lock\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.674795 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-cache\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.674855 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.674937 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.674969 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zg9s\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-kube-api-access-8zg9s\") pod \"swift-storage-0\" (UID: 
\"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.776750 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-lock\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.776794 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-cache\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.776834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.776883 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.776918 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zg9s\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-kube-api-access-8zg9s\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: E1205 19:33:33.777378 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 19:33:33 crc kubenswrapper[4982]: E1205 19:33:33.777393 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 19:33:33 crc kubenswrapper[4982]: E1205 19:33:33.777431 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:33:34.277415902 +0000 UTC m=+1193.159301897 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.777557 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-lock\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.777605 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/03bef3aa-7dbf-41c2-8754-7be39af98913-cache\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.781522 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.781558 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/91fb72231646a7f64b51f6199d1b32495b76298758157e3689073196c70d15ea/globalmount\"" pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.794803 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zg9s\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-kube-api-access-8zg9s\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.814585 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-514be48e-ee77-4ebf-bb5a-fb797ceb5421\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.922552 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-hrhl7"] Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.924358 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.928687 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.930227 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.930747 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hrhl7"] Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.934506 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.980461 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.980631 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.980699 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.980727 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.981001 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.981045 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhr22\" (UniqueName: \"kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:33 crc kubenswrapper[4982]: I1205 19:33:33.981084 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 
19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.066483 4982 generic.go:334] "Generic (PLEG): container finished" podID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerID="7c547d2fe211cfdd09eff3f3d9b4116cf7a8965025b7a7bff9bc3f0aca0181ff" exitCode=0 Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.066584 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" event={"ID":"0456e5b5-538b-4878-803d-ed6ae4d61998","Type":"ContainerDied","Data":"7c547d2fe211cfdd09eff3f3d9b4116cf7a8965025b7a7bff9bc3f0aca0181ff"} Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.066612 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" event={"ID":"0456e5b5-538b-4878-803d-ed6ae4d61998","Type":"ContainerStarted","Data":"e2ebc6208995f7f9d43a32d89405d603a02dc339ced8d6db5d1bc96f415f6f40"} Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.073380 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0","Type":"ContainerStarted","Data":"b9440dfa84884cf0a22b05b249724649f2f5e106f52d308b894e6aa775e57ba4"} Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.076525 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerStarted","Data":"e0e667bb7326f9117e021442929b3a4f8127da2964721e14c6fe962fcb1c5cf5"} Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.077763 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="dnsmasq-dns" containerID="cri-o://5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209" gracePeriod=10 Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082127 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082331 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082377 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082402 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhr22\" 
(UniqueName: \"kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082436 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.082540 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.084606 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.086829 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.087135 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.087989 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.092717 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.094731 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.097899 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhr22\" (UniqueName: \"kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22\") pod \"swift-ring-rebalance-hrhl7\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") " 
pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.110725 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=6.415789256 podStartE2EDuration="36.11070552s" podCreationTimestamp="2025-12-05 19:32:58 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.896564487 +0000 UTC m=+1162.778450482" lastFinishedPulling="2025-12-05 19:33:33.591480751 +0000 UTC m=+1192.473366746" observedRunningTime="2025-12-05 19:33:34.10872993 +0000 UTC m=+1192.990615945" watchObservedRunningTime="2025-12-05 19:33:34.11070552 +0000 UTC m=+1192.992591525" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.287160 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:34 crc kubenswrapper[4982]: E1205 19:33:34.287526 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 19:33:34 crc kubenswrapper[4982]: E1205 19:33:34.287561 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 19:33:34 crc kubenswrapper[4982]: E1205 19:33:34.287626 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:33:35.287602885 +0000 UTC m=+1194.169488880 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.362516 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrhl7" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.545959 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.604891 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc\") pod \"e285e4c4-bd4b-49f4-8768-2f88f362481d\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.605343 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb\") pod \"e285e4c4-bd4b-49f4-8768-2f88f362481d\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.605417 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config\") pod \"e285e4c4-bd4b-49f4-8768-2f88f362481d\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.605501 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tmqrm\" (UniqueName: \"kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm\") pod \"e285e4c4-bd4b-49f4-8768-2f88f362481d\" (UID: \"e285e4c4-bd4b-49f4-8768-2f88f362481d\") " Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.635824 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm" (OuterVolumeSpecName: "kube-api-access-tmqrm") pod "e285e4c4-bd4b-49f4-8768-2f88f362481d" (UID: "e285e4c4-bd4b-49f4-8768-2f88f362481d"). InnerVolumeSpecName "kube-api-access-tmqrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.665931 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e285e4c4-bd4b-49f4-8768-2f88f362481d" (UID: "e285e4c4-bd4b-49f4-8768-2f88f362481d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.673105 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e285e4c4-bd4b-49f4-8768-2f88f362481d" (UID: "e285e4c4-bd4b-49f4-8768-2f88f362481d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.674749 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config" (OuterVolumeSpecName: "config") pod "e285e4c4-bd4b-49f4-8768-2f88f362481d" (UID: "e285e4c4-bd4b-49f4-8768-2f88f362481d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.707832 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.707869 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tmqrm\" (UniqueName: \"kubernetes.io/projected/e285e4c4-bd4b-49f4-8768-2f88f362481d-kube-api-access-tmqrm\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.707887 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.707898 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e285e4c4-bd4b-49f4-8768-2f88f362481d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:34 crc kubenswrapper[4982]: I1205 19:33:34.903024 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-hrhl7"] Dec 05 19:33:34 crc kubenswrapper[4982]: W1205 19:33:34.910779 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod155eecea_ebae_400d_a81e_1d28392b290e.slice/crio-acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f WatchSource:0}: Error finding container acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f: Status 404 returned error can't find the container with id acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.085526 4982 generic.go:334] "Generic (PLEG): container finished" podID="4e3679d1-2b65-494e-bc5f-2a68697da816" containerID="a978109b9ff47362b3a276bade23a2bb0ceab312da83494323a53354e5a607de" exitCode=0 Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.085649 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4e3679d1-2b65-494e-bc5f-2a68697da816","Type":"ContainerDied","Data":"a978109b9ff47362b3a276bade23a2bb0ceab312da83494323a53354e5a607de"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.086930 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrhl7" event={"ID":"155eecea-ebae-400d-a81e-1d28392b290e","Type":"ContainerStarted","Data":"acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.091363 4982 generic.go:334] "Generic (PLEG): container finished" podID="d452a876-70e0-416c-ab4d-667b53e8f86e" containerID="d30ff30e53baeb0b2218ec794f8dd31da1958203808d98e090a66e63b84319ba" exitCode=0 Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.091428 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-sct9d" event={"ID":"d452a876-70e0-416c-ab4d-667b53e8f86e","Type":"ContainerDied","Data":"d30ff30e53baeb0b2218ec794f8dd31da1958203808d98e090a66e63b84319ba"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.095931 4982 generic.go:334] "Generic (PLEG): container finished" podID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerID="5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209" exitCode=0 Dec 05 19:33:35 crc 
kubenswrapper[4982]: I1205 19:33:35.095993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerDied","Data":"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.096022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" event={"ID":"e285e4c4-bd4b-49f4-8768-2f88f362481d","Type":"ContainerDied","Data":"1745cb7e2ffaee027cb1565aa3fe7f4c279c4dedf7cc940c5bc99f13b0d81b30"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.096038 4982 scope.go:117] "RemoveContainer" containerID="5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.096195 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-h8h59" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.101490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf" event={"ID":"c4d66436-88ae-4023-9601-bd2aa6954667","Type":"ContainerStarted","Data":"a725d090d7507154d93262bc85fad6a352f0cb554d187790b255b369d6acc4bf"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.102211 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-jhsjf" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.103715 4982 generic.go:334] "Generic (PLEG): container finished" podID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerID="e2b5d627abb81fcf217497bed705f68b49a9e541aa87fc9be4ce37ceb9eae824" exitCode=0 Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.103771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerDied","Data":"e2b5d627abb81fcf217497bed705f68b49a9e541aa87fc9be4ce37ceb9eae824"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.108817 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af72a355-0521-4724-8224-c7fd9046b4d6","Type":"ContainerStarted","Data":"125a6ec84e609bea013a2953054ad721d22006e4ee8f138ee425769a168e6c06"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.111446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b6542514-631f-4a81-aba8-11cfebd33048","Type":"ContainerStarted","Data":"3ba27f311e87ea9a9ba5383ec5a161481f8c5b043dd362ffcda5d20b09699f93"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.115680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" event={"ID":"0456e5b5-538b-4878-803d-ed6ae4d61998","Type":"ContainerStarted","Data":"02897d28ae6fba5443272ca81447b43cd6779ffaf119577b54483eb060ebb87a"} Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.115907 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.130751 4982 scope.go:117] "RemoveContainer" containerID="479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.170057 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jhsjf" podStartSLOduration=9.007480372 podStartE2EDuration="39.170043237s" 
podCreationTimestamp="2025-12-05 19:32:56 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.639471791 +0000 UTC m=+1162.521357786" lastFinishedPulling="2025-12-05 19:33:33.802034656 +0000 UTC m=+1192.683920651" observedRunningTime="2025-12-05 19:33:35.167960795 +0000 UTC m=+1194.049846790" watchObservedRunningTime="2025-12-05 19:33:35.170043237 +0000 UTC m=+1194.051929232" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.171992 4982 scope.go:117] "RemoveContainer" containerID="5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209" Dec 05 19:33:35 crc kubenswrapper[4982]: E1205 19:33:35.172438 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209\": container with ID starting with 5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209 not found: ID does not exist" containerID="5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.172493 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209"} err="failed to get container status \"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209\": rpc error: code = NotFound desc = could not find container \"5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209\": container with ID starting with 5383bb2f4a234d12a12bdd75e8d7ece73622a86c9f885f585ef79f166f655209 not found: ID does not exist" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.172526 4982 scope.go:117] "RemoveContainer" containerID="479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d" Dec 05 19:33:35 crc kubenswrapper[4982]: E1205 19:33:35.172883 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d\": container with ID starting with 479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d not found: ID does not exist" containerID="479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.172924 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d"} err="failed to get container status \"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d\": rpc error: code = NotFound desc = could not find container \"479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d\": container with ID starting with 479784d611a18344855a4fb7db71faaa3ede9a6e5a333ee705b585a1c84cfb6d not found: ID does not exist" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.201784 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.210489 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-h8h59"] Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.249713 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podStartSLOduration=3.249695485 podStartE2EDuration="3.249695485s" podCreationTimestamp="2025-12-05 19:33:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:35.242161987 +0000 UTC m=+1194.124047982" watchObservedRunningTime="2025-12-05 19:33:35.249695485 +0000 UTC m=+1194.131581490" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.348623 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:35 crc kubenswrapper[4982]: E1205 19:33:35.348822 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 19:33:35 crc kubenswrapper[4982]: E1205 19:33:35.348870 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 19:33:35 crc kubenswrapper[4982]: E1205 19:33:35.348946 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:33:37.348921601 +0000 UTC m=+1196.230807596 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.405448 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" path="/var/lib/kubelet/pods/e285e4c4-bd4b-49f4-8768-2f88f362481d/volumes" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.406013 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:35 crc kubenswrapper[4982]: I1205 19:33:35.591371 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.129997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-sct9d" event={"ID":"d452a876-70e0-416c-ab4d-667b53e8f86e","Type":"ContainerStarted","Data":"225e337d9277293066330bf029373a2410e301f433de4d253cd255b0496067f2"} Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.130042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-sct9d" event={"ID":"d452a876-70e0-416c-ab4d-667b53e8f86e","Type":"ContainerStarted","Data":"f2c9affba2b6492966e3dcc5bf11e9485dd8afaa97621d9b39cfc60fa5d82e9b"} Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.131524 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.131620 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.157784 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-sct9d" podStartSLOduration=11.094501098 podStartE2EDuration="40.157763608s" podCreationTimestamp="2025-12-05 19:32:56 +0000 UTC" firstStartedPulling="2025-12-05 19:33:04.799474371 +0000 UTC m=+1163.681360366" 
lastFinishedPulling="2025-12-05 19:33:33.862736881 +0000 UTC m=+1192.744622876" observedRunningTime="2025-12-05 19:33:36.152752873 +0000 UTC m=+1195.034638888" watchObservedRunningTime="2025-12-05 19:33:36.157763608 +0000 UTC m=+1195.039649603" Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.390533 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:36 crc kubenswrapper[4982]: I1205 19:33:36.433426 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:37 crc kubenswrapper[4982]: I1205 19:33:37.404808 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:37 crc kubenswrapper[4982]: E1205 19:33:37.404965 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 19:33:37 crc kubenswrapper[4982]: E1205 19:33:37.404977 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 19:33:37 crc kubenswrapper[4982]: E1205 19:33:37.405026 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:33:41.405012316 +0000 UTC m=+1200.286898311 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found Dec 05 19:33:39 crc kubenswrapper[4982]: I1205 19:33:39.161086 4982 generic.go:334] "Generic (PLEG): container finished" podID="af72a355-0521-4724-8224-c7fd9046b4d6" containerID="125a6ec84e609bea013a2953054ad721d22006e4ee8f138ee425769a168e6c06" exitCode=0 Dec 05 19:33:39 crc kubenswrapper[4982]: I1205 19:33:39.161234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af72a355-0521-4724-8224-c7fd9046b4d6","Type":"ContainerDied","Data":"125a6ec84e609bea013a2953054ad721d22006e4ee8f138ee425769a168e6c06"} Dec 05 19:33:39 crc kubenswrapper[4982]: I1205 19:33:39.164897 4982 generic.go:334] "Generic (PLEG): container finished" podID="b6542514-631f-4a81-aba8-11cfebd33048" containerID="3ba27f311e87ea9a9ba5383ec5a161481f8c5b043dd362ffcda5d20b09699f93" exitCode=0 Dec 05 19:33:39 crc kubenswrapper[4982]: I1205 19:33:39.164939 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b6542514-631f-4a81-aba8-11cfebd33048","Type":"ContainerDied","Data":"3ba27f311e87ea9a9ba5383ec5a161481f8c5b043dd362ffcda5d20b09699f93"} Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.436205 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.566692 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 19:33:40 crc kubenswrapper[4982]: E1205 19:33:40.567091 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="init" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.567105 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="init" Dec 05 19:33:40 crc kubenswrapper[4982]: E1205 19:33:40.567125 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="dnsmasq-dns" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.567131 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="dnsmasq-dns" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.567323 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e285e4c4-bd4b-49f4-8768-2f88f362481d" containerName="dnsmasq-dns" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.568830 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.571457 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.571777 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-lhbzr" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.571891 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.572875 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.600838 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666041 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8vhl\" (UniqueName: \"kubernetes.io/projected/e6b8e1da-6aa2-4556-a427-35c1f9920482-kube-api-access-t8vhl\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666122 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-config\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666156 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666488 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.666572 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-scripts\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.767941 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-config\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.768373 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.768583 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.768730 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.768853 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.768974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-scripts\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.769112 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8vhl\" (UniqueName: \"kubernetes.io/projected/e6b8e1da-6aa2-4556-a427-35c1f9920482-kube-api-access-t8vhl\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.769559 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.770098 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-scripts\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.770504 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e6b8e1da-6aa2-4556-a427-35c1f9920482-config\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.776126 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.776331 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.777314 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6b8e1da-6aa2-4556-a427-35c1f9920482-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.781313 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.787131 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8vhl\" (UniqueName: \"kubernetes.io/projected/e6b8e1da-6aa2-4556-a427-35c1f9920482-kube-api-access-t8vhl\") pod \"ovn-northd-0\" (UID: \"e6b8e1da-6aa2-4556-a427-35c1f9920482\") " pod="openstack/ovn-northd-0" Dec 05 19:33:40 crc kubenswrapper[4982]: I1205 19:33:40.893496 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 19:33:41 crc kubenswrapper[4982]: I1205 19:33:41.486921 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:33:41 crc kubenswrapper[4982]: E1205 19:33:41.487098 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 19:33:41 crc kubenswrapper[4982]: E1205 19:33:41.487119 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 19:33:41 crc kubenswrapper[4982]: E1205 19:33:41.487200 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:33:49.487184415 +0000 UTC m=+1208.369070410 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found Dec 05 19:33:42 crc kubenswrapper[4982]: I1205 19:33:42.557271 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:33:42 crc kubenswrapper[4982]: I1205 19:33:42.557999 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:33:42 crc kubenswrapper[4982]: I1205 19:33:42.615305 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:33:42 crc kubenswrapper[4982]: I1205 19:33:42.707749 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:42 crc kubenswrapper[4982]: I1205 19:33:42.708008 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-sjz6f" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="dnsmasq-dns" containerID="cri-o://93e9fb19dd6b836d578e9cdd086d7388d999d3f87b3bf154f0b0089e39d5f1a4" gracePeriod=10 Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.210621 4982 generic.go:334] "Generic (PLEG): container finished" podID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerID="93e9fb19dd6b836d578e9cdd086d7388d999d3f87b3bf154f0b0089e39d5f1a4" exitCode=0 Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.210679 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerDied","Data":"93e9fb19dd6b836d578e9cdd086d7388d999d3f87b3bf154f0b0089e39d5f1a4"} Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.540644 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-zrl2q" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.651314 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.651547 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.664763 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-xvg29" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.757902 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sx5kt\" (UniqueName: \"kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt\") pod \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.757990 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc\") pod \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.758045 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb\") pod \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.758128 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config\") pod \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.758304 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb\") pod \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\" (UID: \"37eb941f-a0a5-4f0f-9504-6c07ec6535a0\") " Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.777856 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt" (OuterVolumeSpecName: "kube-api-access-sx5kt") pod "37eb941f-a0a5-4f0f-9504-6c07ec6535a0" (UID: "37eb941f-a0a5-4f0f-9504-6c07ec6535a0"). InnerVolumeSpecName "kube-api-access-sx5kt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.810913 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.860474 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sx5kt\" (UniqueName: \"kubernetes.io/projected/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-kube-api-access-sx5kt\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.935952 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "37eb941f-a0a5-4f0f-9504-6c07ec6535a0" (UID: "37eb941f-a0a5-4f0f-9504-6c07ec6535a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.941365 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config" (OuterVolumeSpecName: "config") pod "37eb941f-a0a5-4f0f-9504-6c07ec6535a0" (UID: "37eb941f-a0a5-4f0f-9504-6c07ec6535a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.944721 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "37eb941f-a0a5-4f0f-9504-6c07ec6535a0" (UID: "37eb941f-a0a5-4f0f-9504-6c07ec6535a0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.946199 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "37eb941f-a0a5-4f0f-9504-6c07ec6535a0" (UID: "37eb941f-a0a5-4f0f-9504-6c07ec6535a0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.962581 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.962853 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.962862 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:44 crc kubenswrapper[4982]: I1205 19:33:44.962870 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37eb941f-a0a5-4f0f-9504-6c07ec6535a0-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.226104 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"b6542514-631f-4a81-aba8-11cfebd33048","Type":"ContainerStarted","Data":"d137b5209d5b43ac5f8f93ace88ada0eb82db662872b4b6a0c53593e4f857c27"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.228299 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrhl7" event={"ID":"155eecea-ebae-400d-a81e-1d28392b290e","Type":"ContainerStarted","Data":"bf5d20cb2ec369ebcc88a4f483a8e856915a7a47ce8e13c11dab856c41e22a3e"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.230430 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-sjz6f" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.231284 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-sjz6f" event={"ID":"37eb941f-a0a5-4f0f-9504-6c07ec6535a0","Type":"ContainerDied","Data":"aa6b5ceea64582a55ae12f63e7bb030e395a9636bcb49bf4c06727e743697eca"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.231322 4982 scope.go:117] "RemoveContainer" containerID="93e9fb19dd6b836d578e9cdd086d7388d999d3f87b3bf154f0b0089e39d5f1a4" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.232400 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e6b8e1da-6aa2-4556-a427-35c1f9920482","Type":"ContainerStarted","Data":"30e327809eb37bed7ed7fe67413588ed5fb665502a2170f2cf9a456e002fd263"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.242578 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerStarted","Data":"ea1d536fde382cd9a652650c7cd67d90386f7d15a08b1066a52fcd7f98c0dfbb"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.244436 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4e3679d1-2b65-494e-bc5f-2a68697da816","Type":"ContainerStarted","Data":"0e29df1d768ae6685ac256df334ded134beb4435745cef3566953d40b9d64fd7"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.248551 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"af72a355-0521-4724-8224-c7fd9046b4d6","Type":"ContainerStarted","Data":"7784abeccda932de21e3d34815de4e8761fcd9865336e7a1456424ce5597952d"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.258186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"700dc7aa-a441-4419-90c8-ff6ec6d31f23","Type":"ContainerStarted","Data":"1f137d81ec4e7d502590effaff67e660004627a4336b654d4cff1c954480cb1a"} Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.258431 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.268571 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=-9223371978.58623 podStartE2EDuration="58.268545597s" podCreationTimestamp="2025-12-05 19:32:47 +0000 UTC" firstStartedPulling="2025-12-05 19:33:02.491403928 +0000 UTC m=+1161.373289933" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:33:45.252526507 +0000 UTC m=+1204.134412522" watchObservedRunningTime="2025-12-05 19:33:45.268545597 +0000 UTC m=+1204.150431632" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.283303 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=27.256372185 podStartE2EDuration="57.283281515s" podCreationTimestamp="2025-12-05 19:32:48 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.836433936 +0000 UTC m=+1162.718319931" lastFinishedPulling="2025-12-05 19:33:33.863343256 +0000 UTC m=+1192.745229261" observedRunningTime="2025-12-05 19:33:45.272968698 +0000 UTC m=+1204.154854693" watchObservedRunningTime="2025-12-05 19:33:45.283281515 +0000 UTC m=+1204.165167520" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 
19:33:45.301761 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-hrhl7" podStartSLOduration=2.881513433 podStartE2EDuration="12.301740386s" podCreationTimestamp="2025-12-05 19:33:33 +0000 UTC" firstStartedPulling="2025-12-05 19:33:34.924435047 +0000 UTC m=+1193.806321042" lastFinishedPulling="2025-12-05 19:33:44.344662 +0000 UTC m=+1203.226547995" observedRunningTime="2025-12-05 19:33:45.298549066 +0000 UTC m=+1204.180435081" watchObservedRunningTime="2025-12-05 19:33:45.301740386 +0000 UTC m=+1204.183626391" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.328552 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=13.695087026 podStartE2EDuration="54.328533975s" podCreationTimestamp="2025-12-05 19:32:51 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.827406441 +0000 UTC m=+1162.709292426" lastFinishedPulling="2025-12-05 19:33:44.46085339 +0000 UTC m=+1203.342739375" observedRunningTime="2025-12-05 19:33:45.314198377 +0000 UTC m=+1204.196084392" watchObservedRunningTime="2025-12-05 19:33:45.328533975 +0000 UTC m=+1204.210419970" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.345984 4982 scope.go:117] "RemoveContainer" containerID="0c3042f205d62fc88d754be3aa7a85469bebd43fa7de42ab1e7b09cced42c1da" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.353329 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.367689 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-sjz6f"] Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.403697 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" path="/var/lib/kubelet/pods/37eb941f-a0a5-4f0f-9504-6c07ec6535a0/volumes" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.596675 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.665461 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 19:33:45 crc kubenswrapper[4982]: I1205 19:33:45.812508 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85a8d068-da43-4ed2-879a-281872eab097" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 19:33:47 crc kubenswrapper[4982]: I1205 19:33:47.282449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e6b8e1da-6aa2-4556-a427-35c1f9920482","Type":"ContainerStarted","Data":"5fcd4c52fecfe4f162249e69b0860d13945c97c7c9d3c46487d002bc8014bcc9"} Dec 05 19:33:47 crc kubenswrapper[4982]: I1205 19:33:47.282824 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"e6b8e1da-6aa2-4556-a427-35c1f9920482","Type":"ContainerStarted","Data":"29c99c24c3eadadbc6139f079b65d83be3b8ae2f6af2fc57aa715c0a628c1089"} Dec 05 19:33:47 crc kubenswrapper[4982]: E1205 19:33:47.626532 4982 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.94:53176->38.102.83.94:34727: read tcp 38.102.83.94:53176->38.102.83.94:34727: read: connection reset by peer Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 
19:33:48.296066 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"4e3679d1-2b65-494e-bc5f-2a68697da816","Type":"ContainerStarted","Data":"8fb2592795c8b02ed7931ba5ac0d391cd15744f02a71f064a06c38411d751bc1"}
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.296359 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.296437 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.299916 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.323428 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=15.749328033 podStartE2EDuration="56.32340144s" podCreationTimestamp="2025-12-05 19:32:52 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.752890911 +0000 UTC m=+1162.634776916" lastFinishedPulling="2025-12-05 19:33:44.326964328 +0000 UTC m=+1203.208850323" observedRunningTime="2025-12-05 19:33:48.322442566 +0000 UTC m=+1207.204328591" watchObservedRunningTime="2025-12-05 19:33:48.32340144 +0000 UTC m=+1207.205287495"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.359399 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=6.994406442 podStartE2EDuration="8.359376908s" podCreationTimestamp="2025-12-05 19:33:40 +0000 UTC" firstStartedPulling="2025-12-05 19:33:44.821342307 +0000 UTC m=+1203.703228302" lastFinishedPulling="2025-12-05 19:33:46.186312773 +0000 UTC m=+1205.068198768" observedRunningTime="2025-12-05 19:33:48.35586275 +0000 UTC m=+1207.237748765" watchObservedRunningTime="2025-12-05 19:33:48.359376908 +0000 UTC m=+1207.241262903"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.834404 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 05 19:33:48 crc kubenswrapper[4982]: I1205 19:33:48.834515 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 05 19:33:49 crc kubenswrapper[4982]: I1205 19:33:49.307316 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerStarted","Data":"40aa78d8b657813f6b30a70d50a63cb6c41159a99808e9e9ed859cc153811fcc"}
Dec 05 19:33:49 crc kubenswrapper[4982]: I1205 19:33:49.559533 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0"
Dec 05 19:33:49 crc kubenswrapper[4982]: E1205 19:33:49.559701 4982 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 19:33:49 crc kubenswrapper[4982]: E1205 19:33:49.559986 4982 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 19:33:49 crc kubenswrapper[4982]: E1205 19:33:49.560106 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift podName:03bef3aa-7dbf-41c2-8754-7be39af98913 nodeName:}" failed. No retries permitted until 2025-12-05 19:34:05.560091914 +0000 UTC m=+1224.441977909 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift") pod "swift-storage-0" (UID: "03bef3aa-7dbf-41c2-8754-7be39af98913") : configmap "swift-ring-files" not found
Dec 05 19:33:50 crc kubenswrapper[4982]: I1205 19:33:50.585987 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:33:50 crc kubenswrapper[4982]: I1205 19:33:50.586046 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:33:50 crc kubenswrapper[4982]: I1205 19:33:50.997754 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 19:33:51 crc kubenswrapper[4982]: I1205 19:33:51.085869 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.296242 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.332969 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerStarted","Data":"a8aa7d5a95add3263ebfc5afbc76a9f0f683006b316a10fd172325616e0290d8"}
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.337559 4982 generic.go:334] "Generic (PLEG): container finished" podID="155eecea-ebae-400d-a81e-1d28392b290e" containerID="bf5d20cb2ec369ebcc88a4f483a8e856915a7a47ce8e13c11dab856c41e22a3e" exitCode=0
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.337595 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrhl7" event={"ID":"155eecea-ebae-400d-a81e-1d28392b290e","Type":"ContainerDied","Data":"bf5d20cb2ec369ebcc88a4f483a8e856915a7a47ce8e13c11dab856c41e22a3e"}
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.402299 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=12.982790646 podStartE2EDuration="1m0.402279027s" podCreationTimestamp="2025-12-05 19:32:52 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.782031819 +0000 UTC m=+1162.663917814" lastFinishedPulling="2025-12-05 19:33:51.2015202 +0000 UTC m=+1210.083406195" observedRunningTime="2025-12-05 19:33:52.367296493 +0000 UTC m=+1211.249182499" watchObservedRunningTime="2025-12-05 19:33:52.402279027 +0000 UTC m=+1211.284165032"
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.764892 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:33:52 crc kubenswrapper[4982]: I1205 19:33:52.845266 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.748917 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrhl7"
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.856992 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.857097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.857163 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhr22\" (UniqueName: \"kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.857204 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.857330 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.858173 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.858536 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.859667 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts\") pod \"155eecea-ebae-400d-a81e-1d28392b290e\" (UID: \"155eecea-ebae-400d-a81e-1d28392b290e\") "
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.859710 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.860771 4982 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.860792 4982 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/155eecea-ebae-400d-a81e-1d28392b290e-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.864069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22" (OuterVolumeSpecName: "kube-api-access-rhr22") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "kube-api-access-rhr22". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.867217 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.885022 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.887607 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.892108 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts" (OuterVolumeSpecName: "scripts") pod "155eecea-ebae-400d-a81e-1d28392b290e" (UID: "155eecea-ebae-400d-a81e-1d28392b290e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.898532 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.898609 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.916978 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.966614 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/155eecea-ebae-400d-a81e-1d28392b290e-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.966918 4982 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.966933 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.966950 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhr22\" (UniqueName: \"kubernetes.io/projected/155eecea-ebae-400d-a81e-1d28392b290e-kube-api-access-rhr22\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:53 crc kubenswrapper[4982]: I1205 19:33:53.966966 4982 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/155eecea-ebae-400d-a81e-1d28392b290e-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:54 crc kubenswrapper[4982]: I1205 19:33:54.356638 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-hrhl7"
Dec 05 19:33:54 crc kubenswrapper[4982]: I1205 19:33:54.357867 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-hrhl7" event={"ID":"155eecea-ebae-400d-a81e-1d28392b290e","Type":"ContainerDied","Data":"acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f"}
Dec 05 19:33:54 crc kubenswrapper[4982]: I1205 19:33:54.357973 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="acc96afda50e9c1f54e3ce9eb9da42379921927977977b4640eec2a321501a9f"
Dec 05 19:33:54 crc kubenswrapper[4982]: I1205 19:33:54.358329 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:55 crc kubenswrapper[4982]: I1205 19:33:55.812073 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85a8d068-da43-4ed2-879a-281872eab097" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 19:33:57 crc kubenswrapper[4982]: I1205 19:33:57.892701 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:33:57 crc kubenswrapper[4982]: I1205 19:33:57.893408 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="prometheus" containerID="cri-o://ea1d536fde382cd9a652650c7cd67d90386f7d15a08b1066a52fcd7f98c0dfbb" gracePeriod=600
Dec 05 19:33:57 crc kubenswrapper[4982]: I1205 19:33:57.893572 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="config-reloader" containerID="cri-o://40aa78d8b657813f6b30a70d50a63cb6c41159a99808e9e9ed859cc153811fcc" gracePeriod=600
Dec 05 19:33:57 crc kubenswrapper[4982]: I1205 19:33:57.893536 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="thanos-sidecar" containerID="cri-o://a8aa7d5a95add3263ebfc5afbc76a9f0f683006b316a10fd172325616e0290d8" gracePeriod=600
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.393935 4982 generic.go:334] "Generic (PLEG): container finished" podID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerID="a8aa7d5a95add3263ebfc5afbc76a9f0f683006b316a10fd172325616e0290d8" exitCode=0
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.394258 4982 generic.go:334] "Generic (PLEG): container finished" podID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerID="40aa78d8b657813f6b30a70d50a63cb6c41159a99808e9e9ed859cc153811fcc" exitCode=0
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.394266 4982 generic.go:334] "Generic (PLEG): container finished" podID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerID="ea1d536fde382cd9a652650c7cd67d90386f7d15a08b1066a52fcd7f98c0dfbb" exitCode=0
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.393965 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerDied","Data":"a8aa7d5a95add3263ebfc5afbc76a9f0f683006b316a10fd172325616e0290d8"}
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.394297 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerDied","Data":"40aa78d8b657813f6b30a70d50a63cb6c41159a99808e9e9ed859cc153811fcc"}
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.394313 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerDied","Data":"ea1d536fde382cd9a652650c7cd67d90386f7d15a08b1066a52fcd7f98c0dfbb"}
Dec 05 19:33:58 crc kubenswrapper[4982]: I1205 19:33:58.914472 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083243 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083295 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083358 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083384 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083411 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083434 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083575 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.083593 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfp64\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64\") pod \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\" (UID: \"ce039d16-a4f9-40f3-9398-f6c2efc89b41\") "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.084743 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.093844 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64" (OuterVolumeSpecName: "kube-api-access-wfp64") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "kube-api-access-wfp64". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.095079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.095860 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.096747 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config" (OuterVolumeSpecName: "config") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.109760 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out" (OuterVolumeSpecName: "config-out") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.117856 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config" (OuterVolumeSpecName: "web-config") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.122054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "ce039d16-a4f9-40f3-9398-f6c2efc89b41" (UID: "ce039d16-a4f9-40f3-9398-f6c2efc89b41"). InnerVolumeSpecName "pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186850 4982 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-web-config\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186903 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfp64\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-kube-api-access-wfp64\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186957 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") on node \"crc\" "
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186970 4982 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/ce039d16-a4f9-40f3-9398-f6c2efc89b41-tls-assets\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186979 4982 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config-out\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186987 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-config\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.186996 4982 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/ce039d16-a4f9-40f3-9398-f6c2efc89b41-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.187005 4982 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/ce039d16-a4f9-40f3-9398-f6c2efc89b41-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.206755 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.206935 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f") on node "crc"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.288561 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") on node \"crc\" DevicePath \"\""
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.458273 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"ce039d16-a4f9-40f3-9398-f6c2efc89b41","Type":"ContainerDied","Data":"f2399afd69ff0155a12e681dbd5083243a462ae4fef7c40d1deee94ce050725f"}
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.458328 4982 scope.go:117] "RemoveContainer" containerID="a8aa7d5a95add3263ebfc5afbc76a9f0f683006b316a10fd172325616e0290d8"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.458477 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.483495 4982 scope.go:117] "RemoveContainer" containerID="40aa78d8b657813f6b30a70d50a63cb6c41159a99808e9e9ed859cc153811fcc"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.500634 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.507687 4982 scope.go:117] "RemoveContainer" containerID="ea1d536fde382cd9a652650c7cd67d90386f7d15a08b1066a52fcd7f98c0dfbb"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.507974 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.525365 4982 scope.go:117] "RemoveContainer" containerID="e2b5d627abb81fcf217497bed705f68b49a9e541aa87fc9be4ce37ceb9eae824"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540056 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540488 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="dnsmasq-dns"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540511 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="dnsmasq-dns"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540534 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="155eecea-ebae-400d-a81e-1d28392b290e" containerName="swift-ring-rebalance"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540543 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="155eecea-ebae-400d-a81e-1d28392b290e" containerName="swift-ring-rebalance"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540570 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="thanos-sidecar"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540578 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="thanos-sidecar"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540589 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="prometheus"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540596 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="prometheus"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540605 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="config-reloader"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540612 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="config-reloader"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540632 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="init"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540642 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="init"
Dec 05 19:33:59 crc kubenswrapper[4982]: E1205 19:33:59.540654 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="init-config-reloader"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540662 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="init-config-reloader"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540861 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="prometheus"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540882 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="config-reloader"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540898 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="thanos-sidecar"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540926 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="37eb941f-a0a5-4f0f-9504-6c07ec6535a0" containerName="dnsmasq-dns"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.540937 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="155eecea-ebae-400d-a81e-1d28392b290e" containerName="swift-ring-rebalance"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.542978 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.550462 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.550632 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.550742 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.550856 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.550967 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.553293 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-rvw5r"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.555248 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.566723 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700214 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700519 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a65aa27d-1e48-4991-a573-68e9458e1733-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700540 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700582 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700615 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700660 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700717 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700742 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrdvz\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-kube-api-access-xrdvz\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a65aa27d-1e48-4991-a573-68e9458e1733-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.700838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802240 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a65aa27d-1e48-4991-a573-68e9458e1733-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802388 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802427 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.802453 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.803249 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.803580 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.803750 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrdvz\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-kube-api-access-xrdvz\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.804219 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a65aa27d-1e48-4991-a573-68e9458e1733-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.805095 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.805299 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/6dc18053a3b2267841393e5d13e7583bb9f8943657ae1a7f2ce4d8e6481b9d52/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.805167 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/a65aa27d-1e48-4991-a573-68e9458e1733-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.807012 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.807083 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.807502 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.808180 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.808359 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/a65aa27d-1e48-4991-a573-68e9458e1733-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.809915 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.812525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.815995 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/a65aa27d-1e48-4991-a573-68e9458e1733-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.822265 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrdvz\" (UniqueName: \"kubernetes.io/projected/a65aa27d-1e48-4991-a573-68e9458e1733-kube-api-access-xrdvz\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.841619 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2902b09d-64b5-4bc5-b317-2aee9a0bac9f\") pod \"prometheus-metric-storage-0\" (UID: \"a65aa27d-1e48-4991-a573-68e9458e1733\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 19:33:59 crc kubenswrapper[4982]: I1205 19:33:59.900177 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.206842 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-c3f9-account-create-update-vnhdf"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.208419 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.213772 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.219049 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c3f9-account-create-update-vnhdf"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.269578 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-nnd82"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.270899 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.285707 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nnd82"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.312212 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.312345 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9495\" (UniqueName: \"kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.414820 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9495\" (UniqueName: \"kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.414903 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.414962 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.414990 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t4psw\" (UniqueName: \"kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.415912 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.416109 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.442055 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9495\" (UniqueName: \"kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495\") pod \"keystone-c3f9-account-create-update-vnhdf\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.452982 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-nq9zf"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.454885 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.469545 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nq9zf"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.478122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerStarted","Data":"c1a498b51e04679923d23e005f1348264fbb108b3325264e027c3c5435996102"}
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.516889 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.516962 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t4psw\" (UniqueName: \"kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.520307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.528078 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c3f9-account-create-update-vnhdf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.535070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t4psw\" (UniqueName: \"kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw\") pod \"keystone-db-create-nnd82\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.563869 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-eb3d-account-create-update-6tb4h"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.565263 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.570853 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.574516 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-eb3d-account-create-update-6tb4h"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.588085 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnd82"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.618435 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.618735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wn2jx\" (UniqueName: \"kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.720491 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.720628 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m6z89\" (UniqueName: \"kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.720681 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.720719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wn2jx\" (UniqueName: \"kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.721942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.732599 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-6xbcw"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.735932 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.749367 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wn2jx\" (UniqueName: \"kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx\") pod \"placement-db-create-nq9zf\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.756227 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6xbcw"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.822356 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m6z89\" (UniqueName: \"kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.822484 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.823352 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.846046 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m6z89\" (UniqueName: \"kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89\") pod \"placement-eb3d-account-create-update-6tb4h\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.877472 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-56a8-account-create-update-cqrxv"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.879115 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.882435 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.888070 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-56a8-account-create-update-cqrxv"]
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.918044 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nq9zf"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.928464 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9z95\" (UniqueName: \"kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.928790 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.951857 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-eb3d-account-create-update-6tb4h"
Dec 05 19:34:00 crc kubenswrapper[4982]: I1205 19:34:00.971637 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.033522 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.033666 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.033703 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9z95\" (UniqueName: \"kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.033788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxvq8\" (UniqueName: \"kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.034425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.058595 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9z95\" (UniqueName: \"kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95\") pod \"glance-db-create-6xbcw\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.058939 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6xbcw"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.063284 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-c3f9-account-create-update-vnhdf"]
Dec 05 19:34:01 crc kubenswrapper[4982]: W1205 19:34:01.086210 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a08add5_9ad7_41eb_abfb_7786db71c537.slice/crio-fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7 WatchSource:0}: Error finding container fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7: Status 404 returned error can't find the container with id fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.135310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxvq8\" (UniqueName: \"kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.135432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.136757 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.164978 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxvq8\" (UniqueName: \"kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8\") pod \"glance-56a8-account-create-update-cqrxv\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " pod="openstack/glance-56a8-account-create-update-cqrxv"
Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.200588 4982 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-56a8-account-create-update-cqrxv" Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.208277 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-nnd82"] Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.419853 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" path="/var/lib/kubelet/pods/ce039d16-a4f9-40f3-9398-f6c2efc89b41/volumes" Dec 05 19:34:01 crc kubenswrapper[4982]: W1205 19:34:01.482726 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a5c0c12_1041_4859_94dd_8b5291a4f2ec.slice/crio-96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1 WatchSource:0}: Error finding container 96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1: Status 404 returned error can't find the container with id 96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1 Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.488763 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nq9zf"] Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.491069 4982 generic.go:334] "Generic (PLEG): container finished" podID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerID="25bd919fb6d1b5d277f9956e642210082f9f9a7875b1f375e7baa0bd19ce6bf9" exitCode=0 Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.491131 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerDied","Data":"25bd919fb6d1b5d277f9956e642210082f9f9a7875b1f375e7baa0bd19ce6bf9"} Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.494500 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c3f9-account-create-update-vnhdf" event={"ID":"4a08add5-9ad7-41eb-abfb-7786db71c537","Type":"ContainerStarted","Data":"fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7"} Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.501697 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnd82" event={"ID":"c27964fd-9fbf-40dd-b25a-91d715bd8ff4","Type":"ContainerStarted","Data":"757036e9a1505730eece332f47621fb4019a141d86e577d5786de3b1eb149766"} Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.553776 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-eb3d-account-create-update-6tb4h"] Dec 05 19:34:01 crc kubenswrapper[4982]: W1205 19:34:01.571377 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e4b2e94_26ac_4689_96d5_ac1ba1d35e7a.slice/crio-4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206 WatchSource:0}: Error finding container 4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206: Status 404 returned error can't find the container with id 4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206 Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.707757 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-56a8-account-create-update-cqrxv"] Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.715383 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-6xbcw"] Dec 05 19:34:01 crc kubenswrapper[4982]: W1205 19:34:01.757728 4982 manager.go:1169] Failed to process watch 
event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod242b9e30_d7a8_4ea5_8cd4_e87471ddb18d.slice/crio-87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04 WatchSource:0}: Error finding container 87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04: Status 404 returned error can't find the container with id 87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04 Dec 05 19:34:01 crc kubenswrapper[4982]: W1205 19:34:01.827260 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7180e9cd_c207_4b5f_984c_fff732e45b76.slice/crio-4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4 WatchSource:0}: Error finding container 4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4: Status 404 returned error can't find the container with id 4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4 Dec 05 19:34:01 crc kubenswrapper[4982]: I1205 19:34:01.898233 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="ce039d16-a4f9-40f3-9398-f6c2efc89b41" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.113:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.510500 4982 generic.go:334] "Generic (PLEG): container finished" podID="c27964fd-9fbf-40dd-b25a-91d715bd8ff4" containerID="a61367614156ba6f63ab4986bdefae65b372e34b00d50cc9ccfc6129241eb9e7" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.510569 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnd82" event={"ID":"c27964fd-9fbf-40dd-b25a-91d715bd8ff4","Type":"ContainerDied","Data":"a61367614156ba6f63ab4986bdefae65b372e34b00d50cc9ccfc6129241eb9e7"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.514113 4982 generic.go:334] "Generic (PLEG): container finished" podID="242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" containerID="8fbe20548c3b138be12db51c8be0dc00a7c522fe0ce5e28d6a3cfb791c1db8a2" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.514170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6xbcw" event={"ID":"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d","Type":"ContainerDied","Data":"8fbe20548c3b138be12db51c8be0dc00a7c522fe0ce5e28d6a3cfb791c1db8a2"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.514198 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6xbcw" event={"ID":"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d","Type":"ContainerStarted","Data":"87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.515678 4982 generic.go:334] "Generic (PLEG): container finished" podID="7180e9cd-c207-4b5f-984c-fff732e45b76" containerID="8bf0b19bda090619d8454b2a11e60d57df8ec05560195a2cea2adb0f0af8c2db" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.515732 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-56a8-account-create-update-cqrxv" event={"ID":"7180e9cd-c207-4b5f-984c-fff732e45b76","Type":"ContainerDied","Data":"8bf0b19bda090619d8454b2a11e60d57df8ec05560195a2cea2adb0f0af8c2db"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.515752 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-56a8-account-create-update-cqrxv" 
event={"ID":"7180e9cd-c207-4b5f-984c-fff732e45b76","Type":"ContainerStarted","Data":"4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.517822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerStarted","Data":"1e27f3b142b77fb968498cdcc70ca2fef5015bc03c13dbb0ea0b79d0063bf8f5"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.518018 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.520770 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a5c0c12-1041-4859-94dd-8b5291a4f2ec" containerID="9ce49f359f6f70f701594d5208cc9cfc2a3bcc5d382dff6f897bf14d5b23a6bb" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.520831 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nq9zf" event={"ID":"2a5c0c12-1041-4859-94dd-8b5291a4f2ec","Type":"ContainerDied","Data":"9ce49f359f6f70f701594d5208cc9cfc2a3bcc5d382dff6f897bf14d5b23a6bb"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.520851 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nq9zf" event={"ID":"2a5c0c12-1041-4859-94dd-8b5291a4f2ec","Type":"ContainerStarted","Data":"96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.523023 4982 generic.go:334] "Generic (PLEG): container finished" podID="4a08add5-9ad7-41eb-abfb-7786db71c537" containerID="37f5cdca9ff9c30585429d7d35d8670e019e8ebf92231b63308bf8313e813a28" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.523073 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c3f9-account-create-update-vnhdf" event={"ID":"4a08add5-9ad7-41eb-abfb-7786db71c537","Type":"ContainerDied","Data":"37f5cdca9ff9c30585429d7d35d8670e019e8ebf92231b63308bf8313e813a28"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.524718 4982 generic.go:334] "Generic (PLEG): container finished" podID="7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" containerID="d967c09c0e8353e004d0c09cda710e7f769f1cec1b8c1baa1ed0bd128f029399" exitCode=0 Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.524834 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-eb3d-account-create-update-6tb4h" event={"ID":"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a","Type":"ContainerDied","Data":"d967c09c0e8353e004d0c09cda710e7f769f1cec1b8c1baa1ed0bd128f029399"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.524924 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-eb3d-account-create-update-6tb4h" event={"ID":"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a","Type":"ContainerStarted","Data":"4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206"} Dec 05 19:34:02 crc kubenswrapper[4982]: I1205 19:34:02.580565 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=58.418376893 podStartE2EDuration="1m17.580547569s" podCreationTimestamp="2025-12-05 19:32:45 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.826942369 +0000 UTC m=+1162.708828364" lastFinishedPulling="2025-12-05 19:33:22.989113045 +0000 UTC m=+1181.870999040" observedRunningTime="2025-12-05 19:34:02.575032581 +0000 UTC m=+1221.456918576" 
watchObservedRunningTime="2025-12-05 19:34:02.580547569 +0000 UTC m=+1221.462433584" Dec 05 19:34:03 crc kubenswrapper[4982]: I1205 19:34:03.534197 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerStarted","Data":"efae6af8ba75e3470068625b1b723ff84c3ffc675607432987e16064588e1666"} Dec 05 19:34:03 crc kubenswrapper[4982]: I1205 19:34:03.984337 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-56a8-account-create-update-cqrxv" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.103544 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts\") pod \"7180e9cd-c207-4b5f-984c-fff732e45b76\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.103760 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxvq8\" (UniqueName: \"kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8\") pod \"7180e9cd-c207-4b5f-984c-fff732e45b76\" (UID: \"7180e9cd-c207-4b5f-984c-fff732e45b76\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.104077 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7180e9cd-c207-4b5f-984c-fff732e45b76" (UID: "7180e9cd-c207-4b5f-984c-fff732e45b76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.104441 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7180e9cd-c207-4b5f-984c-fff732e45b76-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.110175 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8" (OuterVolumeSpecName: "kube-api-access-lxvq8") pod "7180e9cd-c207-4b5f-984c-fff732e45b76" (UID: "7180e9cd-c207-4b5f-984c-fff732e45b76"). InnerVolumeSpecName "kube-api-access-lxvq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.206764 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxvq8\" (UniqueName: \"kubernetes.io/projected/7180e9cd-c207-4b5f-984c-fff732e45b76-kube-api-access-lxvq8\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.260655 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-eb3d-account-create-update-6tb4h" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.269306 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c3f9-account-create-update-vnhdf" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.277480 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-6xbcw" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.287187 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-nq9zf" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.296526 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnd82" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.310041 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9z95\" (UniqueName: \"kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95\") pod \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.310224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts\") pod \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\" (UID: \"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.310870 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" (UID: "242b9e30-d7a8-4ea5-8cd4-e87471ddb18d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.313204 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts\") pod \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.313318 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f9495\" (UniqueName: \"kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495\") pod \"4a08add5-9ad7-41eb-abfb-7786db71c537\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.313388 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m6z89\" (UniqueName: \"kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89\") pod \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\" (UID: \"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.313419 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts\") pod \"4a08add5-9ad7-41eb-abfb-7786db71c537\" (UID: \"4a08add5-9ad7-41eb-abfb-7786db71c537\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.313498 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95" (OuterVolumeSpecName: "kube-api-access-q9z95") pod "242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" (UID: "242b9e30-d7a8-4ea5-8cd4-e87471ddb18d"). InnerVolumeSpecName "kube-api-access-q9z95". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.314143 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" (UID: "7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.314435 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a08add5-9ad7-41eb-abfb-7786db71c537" (UID: "4a08add5-9ad7-41eb-abfb-7786db71c537"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.315166 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a08add5-9ad7-41eb-abfb-7786db71c537-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.315188 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9z95\" (UniqueName: \"kubernetes.io/projected/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-kube-api-access-q9z95\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.315199 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.315208 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.318945 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495" (OuterVolumeSpecName: "kube-api-access-f9495") pod "4a08add5-9ad7-41eb-abfb-7786db71c537" (UID: "4a08add5-9ad7-41eb-abfb-7786db71c537"). InnerVolumeSpecName "kube-api-access-f9495". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.319891 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89" (OuterVolumeSpecName: "kube-api-access-m6z89") pod "7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" (UID: "7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a"). InnerVolumeSpecName "kube-api-access-m6z89". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.416993 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wn2jx\" (UniqueName: \"kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx\") pod \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.417099 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts\") pod \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\" (UID: \"2a5c0c12-1041-4859-94dd-8b5291a4f2ec\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.417206 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts\") pod \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.417260 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t4psw\" (UniqueName: \"kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw\") pod \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\" (UID: \"c27964fd-9fbf-40dd-b25a-91d715bd8ff4\") " Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.418275 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f9495\" (UniqueName: \"kubernetes.io/projected/4a08add5-9ad7-41eb-abfb-7786db71c537-kube-api-access-f9495\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.418306 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m6z89\" (UniqueName: \"kubernetes.io/projected/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a-kube-api-access-m6z89\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.419428 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c27964fd-9fbf-40dd-b25a-91d715bd8ff4" (UID: "c27964fd-9fbf-40dd-b25a-91d715bd8ff4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.419483 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2a5c0c12-1041-4859-94dd-8b5291a4f2ec" (UID: "2a5c0c12-1041-4859-94dd-8b5291a4f2ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.421592 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw" (OuterVolumeSpecName: "kube-api-access-t4psw") pod "c27964fd-9fbf-40dd-b25a-91d715bd8ff4" (UID: "c27964fd-9fbf-40dd-b25a-91d715bd8ff4"). InnerVolumeSpecName "kube-api-access-t4psw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.422024 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx" (OuterVolumeSpecName: "kube-api-access-wn2jx") pod "2a5c0c12-1041-4859-94dd-8b5291a4f2ec" (UID: "2a5c0c12-1041-4859-94dd-8b5291a4f2ec"). InnerVolumeSpecName "kube-api-access-wn2jx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.520329 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.520378 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.520392 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t4psw\" (UniqueName: \"kubernetes.io/projected/c27964fd-9fbf-40dd-b25a-91d715bd8ff4-kube-api-access-t4psw\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.520406 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wn2jx\" (UniqueName: \"kubernetes.io/projected/2a5c0c12-1041-4859-94dd-8b5291a4f2ec-kube-api-access-wn2jx\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.545989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-56a8-account-create-update-cqrxv" event={"ID":"7180e9cd-c207-4b5f-984c-fff732e45b76","Type":"ContainerDied","Data":"4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.546034 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f6364a3dfe1482d2bf58cd23273be9d2b718837e67e70460afec4638fd868d4" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.546114 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-56a8-account-create-update-cqrxv" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.548575 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nq9zf" event={"ID":"2a5c0c12-1041-4859-94dd-8b5291a4f2ec","Type":"ContainerDied","Data":"96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.548652 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="96ec1103ee9b9b928b6295c7868334c630b1d50a43506931045278b0c63b3df1" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.548747 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-nq9zf" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.552137 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-c3f9-account-create-update-vnhdf" event={"ID":"4a08add5-9ad7-41eb-abfb-7786db71c537","Type":"ContainerDied","Data":"fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.552226 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe9a2767b014110ca75b2f1730a816bee1b25870a2be3eda6b49c47624c4bef7" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.552311 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-c3f9-account-create-update-vnhdf" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.556475 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-eb3d-account-create-update-6tb4h" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.556474 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-eb3d-account-create-update-6tb4h" event={"ID":"7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a","Type":"ContainerDied","Data":"4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.556601 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4bc78b5a54e4266d85ff1b7fc9ad26028bc2da60d86062bca653c73084e94206" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.557761 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-nnd82" event={"ID":"c27964fd-9fbf-40dd-b25a-91d715bd8ff4","Type":"ContainerDied","Data":"757036e9a1505730eece332f47621fb4019a141d86e577d5786de3b1eb149766"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.557780 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="757036e9a1505730eece332f47621fb4019a141d86e577d5786de3b1eb149766" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.557832 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-nnd82" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.559907 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-6xbcw" event={"ID":"242b9e30-d7a8-4ea5-8cd4-e87471ddb18d","Type":"ContainerDied","Data":"87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04"} Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.559972 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87f8b659a57cd0122a59bff84b5e1dc0591ecbceaeb76d212ac3ab724d537c04" Dec 05 19:34:04 crc kubenswrapper[4982]: I1205 19:34:04.559924 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-6xbcw" Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.569666 4982 generic.go:334] "Generic (PLEG): container finished" podID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerID="e0e667bb7326f9117e021442929b3a4f8127da2964721e14c6fe962fcb1c5cf5" exitCode=0 Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.569746 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerDied","Data":"e0e667bb7326f9117e021442929b3a4f8127da2964721e14c6fe962fcb1c5cf5"} Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.640834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.646573 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/03bef3aa-7dbf-41c2-8754-7be39af98913-etc-swift\") pod \"swift-storage-0\" (UID: \"03bef3aa-7dbf-41c2-8754-7be39af98913\") " pod="openstack/swift-storage-0" Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.692581 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 19:34:05 crc kubenswrapper[4982]: I1205 19:34:05.812409 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="85a8d068-da43-4ed2-879a-281872eab097" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.043511 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-jbfm4"] Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051494 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051518 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051535 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7180e9cd-c207-4b5f-984c-fff732e45b76" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051543 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7180e9cd-c207-4b5f-984c-fff732e45b76" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051556 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051562 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051574 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c27964fd-9fbf-40dd-b25a-91d715bd8ff4" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051580 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="c27964fd-9fbf-40dd-b25a-91d715bd8ff4" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051591 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a08add5-9ad7-41eb-abfb-7786db71c537" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051597 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a08add5-9ad7-41eb-abfb-7786db71c537" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: E1205 19:34:06.051603 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a5c0c12-1041-4859-94dd-8b5291a4f2ec" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051609 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a5c0c12-1041-4859-94dd-8b5291a4f2ec" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051750 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a5c0c12-1041-4859-94dd-8b5291a4f2ec" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051762 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051782 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c27964fd-9fbf-40dd-b25a-91d715bd8ff4" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051791 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" containerName="mariadb-database-create" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051801 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a08add5-9ad7-41eb-abfb-7786db71c537" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.051809 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7180e9cd-c207-4b5f-984c-fff732e45b76" containerName="mariadb-account-create-update" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.052358 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jbfm4"] Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.052447 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.054310 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.055264 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-77chn" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.149987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.150080 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.150225 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb4jj\" (UniqueName: \"kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.150632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.252370 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.252450 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.252484 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb4jj\" (UniqueName: \"kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.252635 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 
19:34:06.258785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.260250 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.262058 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.266475 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.282456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb4jj\" (UniqueName: \"kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj\") pod \"glance-db-sync-jbfm4\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.368475 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.581001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"2f99ea94bd0183c1dce89d3599b2c33d9e959e7a3ce7a94590cae4ef10492549"} Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.584723 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerStarted","Data":"6eb462887ee90c252433b9506716b48c1e28067938a942de21fc95c9dec55568"} Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.584960 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.617226 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371956.23757 podStartE2EDuration="1m20.617205322s" podCreationTimestamp="2025-12-05 19:32:46 +0000 UTC" firstStartedPulling="2025-12-05 19:33:03.760233005 +0000 UTC m=+1162.642119000" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:06.616983166 +0000 UTC m=+1225.498869171" watchObservedRunningTime="2025-12-05 19:34:06.617205322 +0000 UTC m=+1225.499091327" Dec 05 19:34:06 crc kubenswrapper[4982]: I1205 19:34:06.929397 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jbfm4"] Dec 05 19:34:06 crc kubenswrapper[4982]: W1205 19:34:06.946288 4982 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda404304e_f5bf_4931_8577_9161a96cfd8d.slice/crio-1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d WatchSource:0}: Error finding container 1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d: Status 404 returned error can't find the container with id 1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.043226 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-jhsjf" podUID="c4d66436-88ae-4023-9601-bd2aa6954667" containerName="ovn-controller" probeResult="failure" output=< Dec 05 19:34:07 crc kubenswrapper[4982]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 19:34:07 crc kubenswrapper[4982]: > Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.087118 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.093845 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-sct9d" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.311291 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jhsjf-config-np5bb"] Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.312682 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.314963 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.334784 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf-config-np5bb"] Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.372908 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.373083 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.373317 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lzhb\" (UniqueName: \"kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.373386 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " 
pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.373473 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.373567 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475199 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lzhb\" (UniqueName: \"kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475390 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475411 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475480 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: 
\"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.475590 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.476382 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.476397 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.478658 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.496763 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lzhb\" (UniqueName: \"kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb\") pod \"ovn-controller-jhsjf-config-np5bb\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") " pod="openstack/ovn-controller-jhsjf-config-np5bb" Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.596561 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jbfm4" event={"ID":"a404304e-f5bf-4931-8577-9161a96cfd8d","Type":"ContainerStarted","Data":"1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d"} Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.637532 4982 util.go:30] "No sandbox for pod can be found. 
Dec 05 19:34:07 crc kubenswrapper[4982]: I1205 19:34:07.637532 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-np5bb"
Dec 05 19:34:08 crc kubenswrapper[4982]: I1205 19:34:08.264242 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf-config-np5bb"]
Dec 05 19:34:08 crc kubenswrapper[4982]: W1205 19:34:08.281559 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5299aaa4_1d52_4d6e_b2b0_94fe054a1779.slice/crio-66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe WatchSource:0}: Error finding container 66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe: Status 404 returned error can't find the container with id 66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe
Dec 05 19:34:08 crc kubenswrapper[4982]: I1205 19:34:08.622768 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"0f17a1b1549545e93a8fc7e5dce3ec102cc42d2e7e6e007270f7ca4fcda5ae5f"}
Dec 05 19:34:08 crc kubenswrapper[4982]: I1205 19:34:08.622818 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"790e48f188c9db2aaa12887f8a23b67e97f653a2d0510e93445ed0a6d2f767ba"}
Dec 05 19:34:08 crc kubenswrapper[4982]: I1205 19:34:08.622832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"138ff145366bbdafdd7562e6850f85956cf9fc1323980b385a501da39c193257"}
Dec 05 19:34:08 crc kubenswrapper[4982]: I1205 19:34:08.623770 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-np5bb" event={"ID":"5299aaa4-1d52-4d6e-b2b0-94fe054a1779","Type":"ContainerStarted","Data":"66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe"}
Dec 05 19:34:09 crc kubenswrapper[4982]: I1205 19:34:09.636494 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"97edcee5471f3316f015e5ee3d4a0d3bd2c57d03eddc208077cd7c4f53afc7fb"}
Dec 05 19:34:09 crc kubenswrapper[4982]: I1205 19:34:09.640120 4982 generic.go:334] "Generic (PLEG): container finished" podID="5299aaa4-1d52-4d6e-b2b0-94fe054a1779" containerID="31449d61eb9812aaa9e01f33f8963cdbf64b5b3447951531b48886b715d5ac59" exitCode=0
Dec 05 19:34:09 crc kubenswrapper[4982]: I1205 19:34:09.640191 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-np5bb" event={"ID":"5299aaa4-1d52-4d6e-b2b0-94fe054a1779","Type":"ContainerDied","Data":"31449d61eb9812aaa9e01f33f8963cdbf64b5b3447951531b48886b715d5ac59"}
Dec 05 19:34:10 crc kubenswrapper[4982]: I1205 19:34:10.655757 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"fd77546bcc30bbc832c0dca4edda2453d9628f555631293fcb0a4cf57bf77b84"}
Dec 05 19:34:10 crc kubenswrapper[4982]: I1205 19:34:10.656005 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"f8b8851f36a880ed87164f9fd0b0a2232eb5410ff428ffffbcdd1b452134df6b"}
Dec 05 19:34:10 crc kubenswrapper[4982]: I1205 19:34:10.657098 4982 generic.go:334] "Generic (PLEG): container finished" podID="a65aa27d-1e48-4991-a573-68e9458e1733" containerID="efae6af8ba75e3470068625b1b723ff84c3ffc675607432987e16064588e1666" exitCode=0
Dec 05 19:34:10 crc kubenswrapper[4982]: I1205 19:34:10.657232 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerDied","Data":"efae6af8ba75e3470068625b1b723ff84c3ffc675607432987e16064588e1666"}
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.003254 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-np5bb"
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.135989 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.136122 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.136171 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.136243 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9lzhb\" (UniqueName: \"kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.136334 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.136372 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts\") pod \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\" (UID: \"5299aaa4-1d52-4d6e-b2b0-94fe054a1779\") "
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.137110 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run" (OuterVolumeSpecName: "var-run") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.137189 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.137396 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.137511 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.138014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts" (OuterVolumeSpecName: "scripts") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.140549 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb" (OuterVolumeSpecName: "kube-api-access-9lzhb") pod "5299aaa4-1d52-4d6e-b2b0-94fe054a1779" (UID: "5299aaa4-1d52-4d6e-b2b0-94fe054a1779"). InnerVolumeSpecName "kube-api-access-9lzhb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238085 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238123 4982 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-additional-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238139 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9lzhb\" (UniqueName: \"kubernetes.io/projected/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-kube-api-access-9lzhb\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238165 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-log-ovn\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238178 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.238189 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5299aaa4-1d52-4d6e-b2b0-94fe054a1779-var-run\") on node \"crc\" DevicePath \"\""
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.673997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerStarted","Data":"65e39a75480cde041f144c9fbf4536288256b269e513c9d80d9da4a504b24ae0"}
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.679752 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"88d70fe6b03469b8ef819ae21d236c60f0b943285330d6a54b47deee8fb68404"}
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.679822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"df15c43c219ab200084ee6d4af1cabdab493ddcdcf7d2a5ec81cfece1b3726a8"}
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.681888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-np5bb" event={"ID":"5299aaa4-1d52-4d6e-b2b0-94fe054a1779","Type":"ContainerDied","Data":"66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe"}
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.681931 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66e0a52e49ddd0d3fc1c0e57072a9ce41f74e12c397260c8e495255c5f3f70fe"
Dec 05 19:34:11 crc kubenswrapper[4982]: I1205 19:34:11.682112 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-np5bb"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.061759 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-jhsjf"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.120634 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jhsjf-config-np5bb"]
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.131231 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jhsjf-config-np5bb"]
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.210597 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-jhsjf-config-2dgv5"]
Dec 05 19:34:12 crc kubenswrapper[4982]: E1205 19:34:12.210984 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5299aaa4-1d52-4d6e-b2b0-94fe054a1779" containerName="ovn-config"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.210999 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="5299aaa4-1d52-4d6e-b2b0-94fe054a1779" containerName="ovn-config"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.212205 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5299aaa4-1d52-4d6e-b2b0-94fe054a1779" containerName="ovn-config"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.212836 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.215675 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.234576 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf-config-2dgv5"]
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.364834 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gqkn\" (UniqueName: \"kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.364912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.364948 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.364970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.364986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.365015 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466536 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466572 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466594 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466638 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466747 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gqkn\" (UniqueName: \"kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466813 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466897 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.466935 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.468496 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.469315 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.557202 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.557464 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.557717 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.558709 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.558769 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed" gracePeriod=600
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.837658 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gqkn\" (UniqueName: \"kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn\") pod \"ovn-controller-jhsjf-config-2dgv5\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:12 crc kubenswrapper[4982]: I1205 19:34:12.865336 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-2dgv5"
Dec 05 19:34:13 crc kubenswrapper[4982]: I1205 19:34:13.402823 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5299aaa4-1d52-4d6e-b2b0-94fe054a1779" path="/var/lib/kubelet/pods/5299aaa4-1d52-4d6e-b2b0-94fe054a1779/volumes"
Dec 05 19:34:13 crc kubenswrapper[4982]: I1205 19:34:13.712346 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"21c66b026597ac7723da2d0376884e5bf50bb59fe10e78b38bf175e2c86cfc4f"}
Dec 05 19:34:13 crc kubenswrapper[4982]: I1205 19:34:13.726709 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed" exitCode=0
Dec 05 19:34:13 crc kubenswrapper[4982]: I1205 19:34:13.726801 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed"}
Dec 05 19:34:13 crc kubenswrapper[4982]: I1205 19:34:13.726885 4982 scope.go:117] "RemoveContainer" containerID="a3c43aed6bce4bef46a60d053a18694c6e39d12312a98b59d3e2035ce4e4dc6e"
Dec 05 19:34:14 crc kubenswrapper[4982]: I1205 19:34:14.741962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerStarted","Data":"883274a874076479fc342eb1ab4f7b520fc3705a534f7563f70d84ef2eb9609d"}
Dec 05 19:34:15 crc kubenswrapper[4982]: I1205 19:34:15.813593 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0"
Dec 05 19:34:17 crc kubenswrapper[4982]: I1205 19:34:17.519420 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 05 19:34:17 crc kubenswrapper[4982]: I1205 19:34:17.635304 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.004383 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-7c96r"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.005947 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.013372 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7c96r"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.066874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-36e3-account-create-update-nrbz9"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.068354 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.070368 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4zmb\" (UniqueName: \"kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.070416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.074544 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.092350 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-36e3-account-create-update-nrbz9"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.117458 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-rhzlm"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.119342 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.156434 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rhzlm"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175297 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r794t\" (UniqueName: \"kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175389 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175426 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4zmb\" (UniqueName: \"kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175448 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175484 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.175550 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl5f4\" (UniqueName: \"kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.177329 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.204213 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-r4cm5"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.205497 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.208452 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4zmb\" (UniqueName: \"kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb\") pod \"barbican-db-create-7c96r\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.214022 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-r4cm5"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.220269 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-13aa-account-create-update-hl7v8"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.221386 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.239705 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286735 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpqpr\" (UniqueName: \"kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts8mv\" (UniqueName: \"kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286836 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl5f4\" (UniqueName: \"kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286878 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r794t\" (UniqueName: \"kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286921 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.286968 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.287007 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.287048 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.315954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.316835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.324396 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-13aa-account-create-update-hl7v8"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.338912 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7c96r"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.339456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r794t\" (UniqueName: \"kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t\") pod \"cinder-db-create-rhzlm\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.341181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl5f4\" (UniqueName: \"kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4\") pod \"cinder-36e3-account-create-update-nrbz9\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.345123 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-dgbrk"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.348863 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.355380 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-dgbrk"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.359350 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.359583 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.359710 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.359878 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-knkd7"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.375598 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8572-account-create-update-ltmw6"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.379961 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.385567 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.386567 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-36e3-account-create-update-nrbz9"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.403778 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8572-account-create-update-ltmw6"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.416562 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.416650 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.416704 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpqpr\" (UniqueName: \"kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.416733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts8mv\" (UniqueName: \"kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.417762 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.418447 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.438281 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpqpr\" (UniqueName: \"kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr\") pod \"cloudkitty-db-create-r4cm5\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.455810 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts8mv\" (UniqueName: \"kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv\") pod \"cloudkitty-13aa-account-create-update-hl7v8\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.459291 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-rhzlm"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.499179 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-n8vqc"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.503184 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n8vqc"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.513560 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-91af-account-create-update-dkk49"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.515011 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-91af-account-create-update-dkk49"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.518095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qpsb\" (UniqueName: \"kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.518203 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.518227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clvtm\" (UniqueName: \"kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.518287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.518361 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.521586 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.533178 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-91af-account-create-update-dkk49"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.549931 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-n8vqc"]
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.596771 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-r4cm5"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.620281 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-925mb\" (UniqueName: \"kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.620383 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qpsb\" (UniqueName: \"kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.620442 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.620960 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clvtm\" (UniqueName: \"kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.621354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.621431 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.621493 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8tm5\" (UniqueName: \"kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.621524 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.621543 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.622878 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.627790 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.628004 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.645761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qpsb\" (UniqueName: \"kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb\") pod \"barbican-8572-account-create-update-ltmw6\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " pod="openstack/barbican-8572-account-create-update-ltmw6"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.650601 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clvtm\" (UniqueName: \"kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm\") pod \"keystone-db-sync-dgbrk\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " pod="openstack/keystone-db-sync-dgbrk"
Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.698423 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8"
Need to start a new one" pod="openstack/keystone-db-sync-dgbrk" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.723164 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.723207 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8tm5\" (UniqueName: \"kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.723233 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.723284 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-925mb\" (UniqueName: \"kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.724110 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.724264 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8572-account-create-update-ltmw6" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.724318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.744459 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8tm5\" (UniqueName: \"kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5\") pod \"neutron-db-create-n8vqc\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.767768 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-925mb\" (UniqueName: \"kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb\") pod \"neutron-91af-account-create-update-dkk49\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.822067 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:18 crc kubenswrapper[4982]: I1205 19:34:18.830330 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.089409 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-36e3-account-create-update-nrbz9"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.206327 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-jhsjf-config-2dgv5"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.427875 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-7c96r"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.428247 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8572-account-create-update-ltmw6"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.604636 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-rhzlm"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.840344 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8572-account-create-update-ltmw6" event={"ID":"8bae30fe-731a-4097-a55f-11dd857f5986","Type":"ContainerStarted","Data":"b90dbe6e4560761c5602b9ff22541380527e11d798960209a36b3a585cb86176"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.856963 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-13aa-account-create-update-hl7v8"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.897340 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"42b922f1e635233068df13cab63a51b788e9dd5341b9ee7d0883eab430df1998"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.897384 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" 
event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"e147340ff7d0dc97becf561d5895aa86ecbce24322f751ac5480841ed15d7f7d"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.898713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-36e3-account-create-update-nrbz9" event={"ID":"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a","Type":"ContainerStarted","Data":"a84c593018586b657355ff28d11cf5b16378847f5717389de75161039b7b9032"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.898739 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-36e3-account-create-update-nrbz9" event={"ID":"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a","Type":"ContainerStarted","Data":"d3c536d3d32d96b8badfa200391e3742e4f23e688c026463693c211cf47d5a4c"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.916272 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-r4cm5"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.927618 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-dgbrk"] Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.927920 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.937464 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-2dgv5" event={"ID":"b3825032-58f5-4494-ba6f-984b83186c7b","Type":"ContainerStarted","Data":"876947b31bd90638bae19a664778b0b3760cfc5fd3cb6281c7e669b832b5e498"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.940178 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7c96r" event={"ID":"31ed0f9e-af97-4842-8cbf-736e4d9d9a68","Type":"ContainerStarted","Data":"2c2c381bd13422a3e8042d6bd25535859c8a106d8eb6a35ba49327e2bd9f2910"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.941080 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rhzlm" event={"ID":"505849cd-c790-4007-bea2-c6fef9b2fba4","Type":"ContainerStarted","Data":"6e48c85e8b06b2504afdaeb5f2875f1d1216a73857104d5c171d4a53ff25fdae"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.942756 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"a65aa27d-1e48-4991-a573-68e9458e1733","Type":"ContainerStarted","Data":"5fc18017c43b6a66929102cc1353d51ad4971653220abd760bb46d2785c654b9"} Dec 05 19:34:23 crc kubenswrapper[4982]: I1205 19:34:23.986481 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-91af-account-create-update-dkk49"] Dec 05 19:34:24 crc kubenswrapper[4982]: W1205 19:34:24.015378 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d61674f_fb7a_4bab_90ad_e4b6c22693ee.slice/crio-e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14 WatchSource:0}: Error finding container e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14: Status 404 returned error can't find the container with id e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14 Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.019296 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/neutron-db-create-n8vqc"] Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.023521 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-36e3-account-create-update-nrbz9" podStartSLOduration=6.023499996 podStartE2EDuration="6.023499996s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:23.974037471 +0000 UTC m=+1242.855923466" watchObservedRunningTime="2025-12-05 19:34:24.023499996 +0000 UTC m=+1242.905385981" Dec 05 19:34:24 crc kubenswrapper[4982]: W1205 19:34:24.029349 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod666982dd_eefa_464c_ae19_7b0ffcabcf07.slice/crio-a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1 WatchSource:0}: Error finding container a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1: Status 404 returned error can't find the container with id a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1 Dec 05 19:34:24 crc kubenswrapper[4982]: W1205 19:34:24.042395 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod434a5cc4_eda4_40f7_a763_c4c61ba909fb.slice/crio-ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf WatchSource:0}: Error finding container ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf: Status 404 returned error can't find the container with id ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.050250 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=25.050227659 podStartE2EDuration="25.050227659s" podCreationTimestamp="2025-12-05 19:33:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:24.019011323 +0000 UTC m=+1242.900897328" watchObservedRunningTime="2025-12-05 19:34:24.050227659 +0000 UTC m=+1242.932113654" Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.900585 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.962691 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-2dgv5" event={"ID":"b3825032-58f5-4494-ba6f-984b83186c7b","Type":"ContainerStarted","Data":"d016887d0517c40d737dc3916a72583b36e50fdb88d1d2d3fa9ac22427453173"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.965166 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" event={"ID":"8d61674f-fb7a-4bab-90ad-e4b6c22693ee","Type":"ContainerStarted","Data":"e36a585e1075accbb098ec175960c4944f322f5cecba342c5e99846f508e6227"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.965211 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" event={"ID":"8d61674f-fb7a-4bab-90ad-e4b6c22693ee","Type":"ContainerStarted","Data":"e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.968359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-sync-dgbrk" event={"ID":"666982dd-eefa-464c-ae19-7b0ffcabcf07","Type":"ContainerStarted","Data":"a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.969896 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8572-account-create-update-ltmw6" event={"ID":"8bae30fe-731a-4097-a55f-11dd857f5986","Type":"ContainerStarted","Data":"53ad9660b7924a8d5e03468aa7c3fa1add97006f400009b56282cc49dade2881"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.987230 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"900ff3be25467e26e58b494b8b3ca7245dd06d7dcc46a5c842d9b6046abf78f8"} Dec 05 19:34:24 crc kubenswrapper[4982]: I1205 19:34:24.998369 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rhzlm" event={"ID":"505849cd-c790-4007-bea2-c6fef9b2fba4","Type":"ContainerStarted","Data":"08c8bf3a282428c2f1ff8240a2d7cc9156faf8429d6bb67a63de7dff8e0c00d2"} Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.000759 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-jhsjf-config-2dgv5" podStartSLOduration=13.000747471 podStartE2EDuration="13.000747471s" podCreationTimestamp="2025-12-05 19:34:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:24.984629375 +0000 UTC m=+1243.866515370" watchObservedRunningTime="2025-12-05 19:34:25.000747471 +0000 UTC m=+1243.882633466" Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.010320 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-r4cm5" event={"ID":"af444583-8979-46e6-adc3-83cc5d6fbbcf","Type":"ContainerStarted","Data":"030c04ecc5e458fd10bc3ba028f2e77c8d47af7da3e26294152ca844c17d9d14"} Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.010353 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-r4cm5" event={"ID":"af444583-8979-46e6-adc3-83cc5d6fbbcf","Type":"ContainerStarted","Data":"273292c84742983fbd0cc23596bb98af3fe9d0f20916302adaa75a3759adab61"} Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.023090 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n8vqc" event={"ID":"434a5cc4-eda4-40f7-a763-c4c61ba909fb","Type":"ContainerStarted","Data":"d3b53ffe23ae5288747b97866c1d9d84c6132943c6d4ba016425db683b69f842"} Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.023210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n8vqc" event={"ID":"434a5cc4-eda4-40f7-a763-c4c61ba909fb","Type":"ContainerStarted","Data":"ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf"} Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.031202 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-8572-account-create-update-ltmw6" podStartSLOduration=7.031170836 podStartE2EDuration="7.031170836s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.00549608 +0000 UTC m=+1243.887382075" watchObservedRunningTime="2025-12-05 19:34:25.031170836 +0000 UTC m=+1243.913056841" Dec 05 19:34:25 crc 
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.034938 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-91af-account-create-update-dkk49" event={"ID":"7323378c-0ffc-47fa-88a2-666178163a52","Type":"ContainerStarted","Data":"80398a6a190f0f1921c183268c9869f89478721ebe65239e1b99eb74d9fee4db"}
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.034991 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-91af-account-create-update-dkk49" event={"ID":"7323378c-0ffc-47fa-88a2-666178163a52","Type":"ContainerStarted","Data":"af88c72cd4d1157764b3cd46352eea398fe9e10219ba747444c8d1b13d5b46e4"}
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.051905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jbfm4" event={"ID":"a404304e-f5bf-4931-8577-9161a96cfd8d","Type":"ContainerStarted","Data":"7cb1483c9751a0a187666105343480f7cbf8fdb1ccd8301135c2b226a4eaa9e2"}
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.072915 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7c96r" event={"ID":"31ed0f9e-af97-4842-8cbf-736e4d9d9a68","Type":"ContainerStarted","Data":"c9f5917405d962414ba72ec97595e839d7404cf30d52b21ed9fe8bd9f13120e9"}
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.090628 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" podStartSLOduration=7.090605022 podStartE2EDuration="7.090605022s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.039176798 +0000 UTC m=+1243.921062793" watchObservedRunningTime="2025-12-05 19:34:25.090605022 +0000 UTC m=+1243.972491027"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.104687 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-rhzlm" podStartSLOduration=7.104661576 podStartE2EDuration="7.104661576s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.069010079 +0000 UTC m=+1243.950896084" watchObservedRunningTime="2025-12-05 19:34:25.104661576 +0000 UTC m=+1243.986547591"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.111240 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-n8vqc" podStartSLOduration=7.111216461 podStartE2EDuration="7.111216461s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.089695459 +0000 UTC m=+1243.971581444" watchObservedRunningTime="2025-12-05 19:34:25.111216461 +0000 UTC m=+1243.993102466"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.120439 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-91af-account-create-update-dkk49" podStartSLOduration=7.120419123 podStartE2EDuration="7.120419123s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.109797095 +0000 UTC m=+1243.991683090" watchObservedRunningTime="2025-12-05 19:34:25.120419123 +0000 UTC m=+1244.002305118"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.144693 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-create-r4cm5" podStartSLOduration=7.144667943 podStartE2EDuration="7.144667943s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.14179137 +0000 UTC m=+1244.023677375" watchObservedRunningTime="2025-12-05 19:34:25.144667943 +0000 UTC m=+1244.026553928"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.176990 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-jbfm4" podStartSLOduration=3.217985987 podStartE2EDuration="19.176972556s" podCreationTimestamp="2025-12-05 19:34:06 +0000 UTC" firstStartedPulling="2025-12-05 19:34:06.950135481 +0000 UTC m=+1225.832021476" lastFinishedPulling="2025-12-05 19:34:22.90912205 +0000 UTC m=+1241.791008045" observedRunningTime="2025-12-05 19:34:25.175788286 +0000 UTC m=+1244.057674291" watchObservedRunningTime="2025-12-05 19:34:25.176972556 +0000 UTC m=+1244.058858551"
Dec 05 19:34:25 crc kubenswrapper[4982]: I1205 19:34:25.197407 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-7c96r" podStartSLOduration=8.19738607 podStartE2EDuration="8.19738607s" podCreationTimestamp="2025-12-05 19:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:25.188718741 +0000 UTC m=+1244.070604736" watchObservedRunningTime="2025-12-05 19:34:25.19738607 +0000 UTC m=+1244.079272065"
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.087498 4982 generic.go:334] "Generic (PLEG): container finished" podID="af444583-8979-46e6-adc3-83cc5d6fbbcf" containerID="030c04ecc5e458fd10bc3ba028f2e77c8d47af7da3e26294152ca844c17d9d14" exitCode=0
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.088082 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-r4cm5" event={"ID":"af444583-8979-46e6-adc3-83cc5d6fbbcf","Type":"ContainerDied","Data":"030c04ecc5e458fd10bc3ba028f2e77c8d47af7da3e26294152ca844c17d9d14"}
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.089898 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d61674f-fb7a-4bab-90ad-e4b6c22693ee" containerID="e36a585e1075accbb098ec175960c4944f322f5cecba342c5e99846f508e6227" exitCode=0
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.089963 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" event={"ID":"8d61674f-fb7a-4bab-90ad-e4b6c22693ee","Type":"ContainerDied","Data":"e36a585e1075accbb098ec175960c4944f322f5cecba342c5e99846f508e6227"}
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.095758 4982 generic.go:334] "Generic (PLEG): container finished" podID="434a5cc4-eda4-40f7-a763-c4c61ba909fb" containerID="d3b53ffe23ae5288747b97866c1d9d84c6132943c6d4ba016425db683b69f842" exitCode=0
Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.095955 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n8vqc" event={"ID":"434a5cc4-eda4-40f7-a763-c4c61ba909fb","Type":"ContainerDied","Data":"d3b53ffe23ae5288747b97866c1d9d84c6132943c6d4ba016425db683b69f842"}
container finished" podID="8bae30fe-731a-4097-a55f-11dd857f5986" containerID="53ad9660b7924a8d5e03468aa7c3fa1add97006f400009b56282cc49dade2881" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.097771 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8572-account-create-update-ltmw6" event={"ID":"8bae30fe-731a-4097-a55f-11dd857f5986","Type":"ContainerDied","Data":"53ad9660b7924a8d5e03468aa7c3fa1add97006f400009b56282cc49dade2881"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.110179 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"e96f7a58f468ab388bf852d4f7f582f42b04de2b9cb7b6a9500446a3fea2c7ff"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.110226 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"2e38e3301a4f7e6a831ca3c6b5633cf0d46173a8d10cc6303c5fa3c4d5ed1f7d"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.110237 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"03bef3aa-7dbf-41c2-8754-7be39af98913","Type":"ContainerStarted","Data":"3ea1859c3befe28f8dd55851972cbbba14d3ae73f59cd9089d20b21c0e702f66"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.113979 4982 generic.go:334] "Generic (PLEG): container finished" podID="25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" containerID="a84c593018586b657355ff28d11cf5b16378847f5717389de75161039b7b9032" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.114033 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-36e3-account-create-update-nrbz9" event={"ID":"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a","Type":"ContainerDied","Data":"a84c593018586b657355ff28d11cf5b16378847f5717389de75161039b7b9032"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.123358 4982 generic.go:334] "Generic (PLEG): container finished" podID="7323378c-0ffc-47fa-88a2-666178163a52" containerID="80398a6a190f0f1921c183268c9869f89478721ebe65239e1b99eb74d9fee4db" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.123465 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-91af-account-create-update-dkk49" event={"ID":"7323378c-0ffc-47fa-88a2-666178163a52","Type":"ContainerDied","Data":"80398a6a190f0f1921c183268c9869f89478721ebe65239e1b99eb74d9fee4db"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.128245 4982 generic.go:334] "Generic (PLEG): container finished" podID="b3825032-58f5-4494-ba6f-984b83186c7b" containerID="d016887d0517c40d737dc3916a72583b36e50fdb88d1d2d3fa9ac22427453173" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.128320 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-2dgv5" event={"ID":"b3825032-58f5-4494-ba6f-984b83186c7b","Type":"ContainerDied","Data":"d016887d0517c40d737dc3916a72583b36e50fdb88d1d2d3fa9ac22427453173"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.129926 4982 generic.go:334] "Generic (PLEG): container finished" podID="31ed0f9e-af97-4842-8cbf-736e4d9d9a68" containerID="c9f5917405d962414ba72ec97595e839d7404cf30d52b21ed9fe8bd9f13120e9" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.130004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7c96r" 
event={"ID":"31ed0f9e-af97-4842-8cbf-736e4d9d9a68","Type":"ContainerDied","Data":"c9f5917405d962414ba72ec97595e839d7404cf30d52b21ed9fe8bd9f13120e9"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.131408 4982 generic.go:334] "Generic (PLEG): container finished" podID="505849cd-c790-4007-bea2-c6fef9b2fba4" containerID="08c8bf3a282428c2f1ff8240a2d7cc9156faf8429d6bb67a63de7dff8e0c00d2" exitCode=0 Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.131447 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rhzlm" event={"ID":"505849cd-c790-4007-bea2-c6fef9b2fba4","Type":"ContainerDied","Data":"08c8bf3a282428c2f1ff8240a2d7cc9156faf8429d6bb67a63de7dff8e0c00d2"} Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.226125 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=47.769546604 podStartE2EDuration="54.226108801s" podCreationTimestamp="2025-12-05 19:33:32 +0000 UTC" firstStartedPulling="2025-12-05 19:34:06.266364137 +0000 UTC m=+1225.148250132" lastFinishedPulling="2025-12-05 19:34:12.722926334 +0000 UTC m=+1231.604812329" observedRunningTime="2025-12-05 19:34:26.216597491 +0000 UTC m=+1245.098483486" watchObservedRunningTime="2025-12-05 19:34:26.226108801 +0000 UTC m=+1245.107994796" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.503929 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.506009 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.529423 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.559461 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631438 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631499 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631598 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631644 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-md7n9\" (UniqueName: \"kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: 
\"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631684 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.631784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733257 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733376 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733414 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-md7n9\" (UniqueName: \"kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733468 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.733900 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.734375 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " 
pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.734435 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.734500 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.734657 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.734722 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.751983 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-md7n9\" (UniqueName: \"kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9\") pod \"dnsmasq-dns-6d5b6d6b67-2q5jw\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:26 crc kubenswrapper[4982]: I1205 19:34:26.885218 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:29 crc kubenswrapper[4982]: I1205 19:34:29.471442 4982 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","podce039d16-a4f9-40f3-9398-f6c2efc89b41"] err="unable to destroy cgroup paths for cgroup [kubepods burstable podce039d16-a4f9-40f3-9398-f6c2efc89b41] : Timed out while waiting for systemd to remove kubepods-burstable-podce039d16_a4f9_40f3_9398_f6c2efc89b41.slice" Dec 05 19:34:29 crc kubenswrapper[4982]: I1205 19:34:29.900660 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 19:34:29 crc kubenswrapper[4982]: I1205 19:34:29.908337 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.200814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-91af-account-create-update-dkk49" event={"ID":"7323378c-0ffc-47fa-88a2-666178163a52","Type":"ContainerDied","Data":"af88c72cd4d1157764b3cd46352eea398fe9e10219ba747444c8d1b13d5b46e4"} Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.201229 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af88c72cd4d1157764b3cd46352eea398fe9e10219ba747444c8d1b13d5b46e4" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.209550 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-36e3-account-create-update-nrbz9" event={"ID":"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a","Type":"ContainerDied","Data":"d3c536d3d32d96b8badfa200391e3742e4f23e688c026463693c211cf47d5a4c"} Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.209585 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3c536d3d32d96b8badfa200391e3742e4f23e688c026463693c211cf47d5a4c" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.210766 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-7c96r" event={"ID":"31ed0f9e-af97-4842-8cbf-736e4d9d9a68","Type":"ContainerDied","Data":"2c2c381bd13422a3e8042d6bd25535859c8a106d8eb6a35ba49327e2bd9f2910"} Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.210789 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c2c381bd13422a3e8042d6bd25535859c8a106d8eb6a35ba49327e2bd9f2910" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.211747 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-r4cm5" event={"ID":"af444583-8979-46e6-adc3-83cc5d6fbbcf","Type":"ContainerDied","Data":"273292c84742983fbd0cc23596bb98af3fe9d0f20916302adaa75a3759adab61"} Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.211768 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="273292c84742983fbd0cc23596bb98af3fe9d0f20916302adaa75a3759adab61" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.212943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-n8vqc" event={"ID":"434a5cc4-eda4-40f7-a763-c4c61ba909fb","Type":"ContainerDied","Data":"ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf"} Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.212978 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba94a054d2e4367fcf622a9c05d9130635941aa51e1aea23ccae61b5a3d620bf" Dec 05 19:34:30 crc 
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.221430 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-jhsjf-config-2dgv5" event={"ID":"b3825032-58f5-4494-ba6f-984b83186c7b","Type":"ContainerDied","Data":"876947b31bd90638bae19a664778b0b3760cfc5fd3cb6281c7e669b832b5e498"}
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.221471 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="876947b31bd90638bae19a664778b0b3760cfc5fd3cb6281c7e669b832b5e498"
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.226334 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-rhzlm" event={"ID":"505849cd-c790-4007-bea2-c6fef9b2fba4","Type":"ContainerDied","Data":"6e48c85e8b06b2504afdaeb5f2875f1d1216a73857104d5c171d4a53ff25fdae"}
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.226361 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e48c85e8b06b2504afdaeb5f2875f1d1216a73857104d5c171d4a53ff25fdae"
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.231173 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" event={"ID":"8d61674f-fb7a-4bab-90ad-e4b6c22693ee","Type":"ContainerDied","Data":"e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14"}
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.231195 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8632c4edbbed0985b28fb4cfa9c356741767ae7afcbbca157e5604b8d095e14"
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.235062 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8572-account-create-update-ltmw6" event={"ID":"8bae30fe-731a-4097-a55f-11dd857f5986","Type":"ContainerDied","Data":"b90dbe6e4560761c5602b9ff22541380527e11d798960209a36b3a585cb86176"}
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.235088 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b90dbe6e4560761c5602b9ff22541380527e11d798960209a36b3a585cb86176"
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.238926 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.278806 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n8vqc"
Need to start a new one" pod="openstack/barbican-8572-account-create-update-ltmw6" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.304296 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts\") pod \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.304444 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8tm5\" (UniqueName: \"kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5\") pod \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\" (UID: \"434a5cc4-eda4-40f7-a763-c4c61ba909fb\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.304512 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qpsb\" (UniqueName: \"kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb\") pod \"8bae30fe-731a-4097-a55f-11dd857f5986\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.304577 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts\") pod \"8bae30fe-731a-4097-a55f-11dd857f5986\" (UID: \"8bae30fe-731a-4097-a55f-11dd857f5986\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.309848 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "434a5cc4-eda4-40f7-a763-c4c61ba909fb" (UID: "434a5cc4-eda4-40f7-a763-c4c61ba909fb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.311034 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8bae30fe-731a-4097-a55f-11dd857f5986" (UID: "8bae30fe-731a-4097-a55f-11dd857f5986"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.311221 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-2dgv5" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.313588 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb" (OuterVolumeSpecName: "kube-api-access-2qpsb") pod "8bae30fe-731a-4097-a55f-11dd857f5986" (UID: "8bae30fe-731a-4097-a55f-11dd857f5986"). InnerVolumeSpecName "kube-api-access-2qpsb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.314541 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5" (OuterVolumeSpecName: "kube-api-access-z8tm5") pod "434a5cc4-eda4-40f7-a763-c4c61ba909fb" (UID: "434a5cc4-eda4-40f7-a763-c4c61ba909fb"). InnerVolumeSpecName "kube-api-access-z8tm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.398900 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.405770 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4gqkn\" (UniqueName: \"kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.405859 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.405921 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406038 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406105 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406186 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run\") pod \"b3825032-58f5-4494-ba6f-984b83186c7b\" (UID: \"b3825032-58f5-4494-ba6f-984b83186c7b\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406666 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8bae30fe-731a-4097-a55f-11dd857f5986-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406698 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/434a5cc4-eda4-40f7-a763-c4c61ba909fb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406711 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8tm5\" (UniqueName: \"kubernetes.io/projected/434a5cc4-eda4-40f7-a763-c4c61ba909fb-kube-api-access-z8tm5\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406725 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qpsb\" (UniqueName: \"kubernetes.io/projected/8bae30fe-731a-4097-a55f-11dd857f5986-kube-api-access-2qpsb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.406772 4982 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run" (OuterVolumeSpecName: "var-run") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.409903 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.410117 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts" (OuterVolumeSpecName: "scripts") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.410268 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.411233 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.419205 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-rhzlm" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.419399 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn" (OuterVolumeSpecName: "kube-api-access-4gqkn") pod "b3825032-58f5-4494-ba6f-984b83186c7b" (UID: "b3825032-58f5-4494-ba6f-984b83186c7b"). InnerVolumeSpecName "kube-api-access-4gqkn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.435507 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-r4cm5" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.449787 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7c96r" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.467417 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.479769 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-36e3-account-create-update-nrbz9" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.507923 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts\") pod \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.507994 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts\") pod \"505849cd-c790-4007-bea2-c6fef9b2fba4\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508020 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts\") pod \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508035 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts\") pod \"af444583-8979-46e6-adc3-83cc5d6fbbcf\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508083 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts\") pod \"7323378c-0ffc-47fa-88a2-666178163a52\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508100 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts8mv\" (UniqueName: \"kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv\") pod \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\" (UID: \"8d61674f-fb7a-4bab-90ad-e4b6c22693ee\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508175 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-925mb\" (UniqueName: \"kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb\") pod \"7323378c-0ffc-47fa-88a2-666178163a52\" (UID: \"7323378c-0ffc-47fa-88a2-666178163a52\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508193 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl5f4\" (UniqueName: \"kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4\") pod \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508235 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r794t\" (UniqueName: \"kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t\") pod \"505849cd-c790-4007-bea2-c6fef9b2fba4\" (UID: \"505849cd-c790-4007-bea2-c6fef9b2fba4\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508300 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g4zmb\" (UniqueName: 
\"kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb\") pod \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\" (UID: \"31ed0f9e-af97-4842-8cbf-736e4d9d9a68\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508314 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts\") pod \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\" (UID: \"25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508339 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpqpr\" (UniqueName: \"kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr\") pod \"af444583-8979-46e6-adc3-83cc5d6fbbcf\" (UID: \"af444583-8979-46e6-adc3-83cc5d6fbbcf\") " Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508848 4982 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508866 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4gqkn\" (UniqueName: \"kubernetes.io/projected/b3825032-58f5-4494-ba6f-984b83186c7b-kube-api-access-4gqkn\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508876 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508885 4982 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508893 4982 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/b3825032-58f5-4494-ba6f-984b83186c7b-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.508902 4982 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/b3825032-58f5-4494-ba6f-984b83186c7b-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.511116 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7323378c-0ffc-47fa-88a2-666178163a52" (UID: "7323378c-0ffc-47fa-88a2-666178163a52"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.511533 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8d61674f-fb7a-4bab-90ad-e4b6c22693ee" (UID: "8d61674f-fb7a-4bab-90ad-e4b6c22693ee"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.511856 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "505849cd-c790-4007-bea2-c6fef9b2fba4" (UID: "505849cd-c790-4007-bea2-c6fef9b2fba4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.512510 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "31ed0f9e-af97-4842-8cbf-736e4d9d9a68" (UID: "31ed0f9e-af97-4842-8cbf-736e4d9d9a68"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.513009 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "af444583-8979-46e6-adc3-83cc5d6fbbcf" (UID: "af444583-8979-46e6-adc3-83cc5d6fbbcf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.519254 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" (UID: "25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.519761 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4" (OuterVolumeSpecName: "kube-api-access-dl5f4") pod "25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" (UID: "25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a"). InnerVolumeSpecName "kube-api-access-dl5f4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.521611 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb" (OuterVolumeSpecName: "kube-api-access-925mb") pod "7323378c-0ffc-47fa-88a2-666178163a52" (UID: "7323378c-0ffc-47fa-88a2-666178163a52"). InnerVolumeSpecName "kube-api-access-925mb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.521784 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb" (OuterVolumeSpecName: "kube-api-access-g4zmb") pod "31ed0f9e-af97-4842-8cbf-736e4d9d9a68" (UID: "31ed0f9e-af97-4842-8cbf-736e4d9d9a68"). InnerVolumeSpecName "kube-api-access-g4zmb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.522306 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t" (OuterVolumeSpecName: "kube-api-access-r794t") pod "505849cd-c790-4007-bea2-c6fef9b2fba4" (UID: "505849cd-c790-4007-bea2-c6fef9b2fba4"). InnerVolumeSpecName "kube-api-access-r794t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.522430 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr" (OuterVolumeSpecName: "kube-api-access-xpqpr") pod "af444583-8979-46e6-adc3-83cc5d6fbbcf" (UID: "af444583-8979-46e6-adc3-83cc5d6fbbcf"). InnerVolumeSpecName "kube-api-access-xpqpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.531091 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv" (OuterVolumeSpecName: "kube-api-access-ts8mv") pod "8d61674f-fb7a-4bab-90ad-e4b6c22693ee" (UID: "8d61674f-fb7a-4bab-90ad-e4b6c22693ee"). InnerVolumeSpecName "kube-api-access-ts8mv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.571784 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610459 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610501 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/505849cd-c790-4007-bea2-c6fef9b2fba4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610512 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610521 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/af444583-8979-46e6-adc3-83cc5d6fbbcf-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610531 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts8mv\" (UniqueName: \"kubernetes.io/projected/8d61674f-fb7a-4bab-90ad-e4b6c22693ee-kube-api-access-ts8mv\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610542 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7323378c-0ffc-47fa-88a2-666178163a52-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610550 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl5f4\" (UniqueName: \"kubernetes.io/projected/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-kube-api-access-dl5f4\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc 
kubenswrapper[4982]: I1205 19:34:30.610558 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-925mb\" (UniqueName: \"kubernetes.io/projected/7323378c-0ffc-47fa-88a2-666178163a52-kube-api-access-925mb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610567 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r794t\" (UniqueName: \"kubernetes.io/projected/505849cd-c790-4007-bea2-c6fef9b2fba4-kube-api-access-r794t\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610575 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g4zmb\" (UniqueName: \"kubernetes.io/projected/31ed0f9e-af97-4842-8cbf-736e4d9d9a68-kube-api-access-g4zmb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610582 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:30 crc kubenswrapper[4982]: I1205 19:34:30.610590 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpqpr\" (UniqueName: \"kubernetes.io/projected/af444583-8979-46e6-adc3-83cc5d6fbbcf-kube-api-access-xpqpr\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.243730 4982 generic.go:334] "Generic (PLEG): container finished" podID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerID="db18d66ccfd2c3aa8f06479440109f9a50c5a0ffbb3b3d606695ff19f029075f" exitCode=0 Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.243794 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" event={"ID":"6572d0e2-18b4-4e4e-8671-1e897b89dcdd","Type":"ContainerDied","Data":"db18d66ccfd2c3aa8f06479440109f9a50c5a0ffbb3b3d606695ff19f029075f"} Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.243820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" event={"ID":"6572d0e2-18b4-4e4e-8671-1e897b89dcdd","Type":"ContainerStarted","Data":"fbe381cf996b4325b2ead65e326dde10c8202c599881efb6535f0a5443a22461"} Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.245810 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8572-account-create-update-ltmw6" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.245805 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-91af-account-create-update-dkk49" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.245815 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-n8vqc" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.245868 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dgbrk" event={"ID":"666982dd-eefa-464c-ae19-7b0ffcabcf07","Type":"ContainerStarted","Data":"48c7db6740b1555fdbe7f1ad0652f97e3636d9a90b73b489e4c3a495e3f0fabd"} Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246072 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-jhsjf-config-2dgv5" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246091 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-rhzlm" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246132 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-7c96r" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246175 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-13aa-account-create-update-hl7v8" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246186 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-36e3-account-create-update-nrbz9" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.246426 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-r4cm5" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.331485 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-dgbrk" podStartSLOduration=7.314478978 podStartE2EDuration="13.331464631s" podCreationTimestamp="2025-12-05 19:34:18 +0000 UTC" firstStartedPulling="2025-12-05 19:34:24.071028153 +0000 UTC m=+1242.952914148" lastFinishedPulling="2025-12-05 19:34:30.088013806 +0000 UTC m=+1248.969899801" observedRunningTime="2025-12-05 19:34:31.297091916 +0000 UTC m=+1250.178977921" watchObservedRunningTime="2025-12-05 19:34:31.331464631 +0000 UTC m=+1250.213350626" Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.456080 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-jhsjf-config-2dgv5"] Dec 05 19:34:31 crc kubenswrapper[4982]: I1205 19:34:31.456126 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-jhsjf-config-2dgv5"] Dec 05 19:34:32 crc kubenswrapper[4982]: I1205 19:34:32.260383 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" event={"ID":"6572d0e2-18b4-4e4e-8671-1e897b89dcdd","Type":"ContainerStarted","Data":"8ef716c915abf9a1a3d7d4d21d66fa2bbfa557383069b295d882b8f94e4a9c2c"} Dec 05 19:34:32 crc kubenswrapper[4982]: I1205 19:34:32.260799 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:32 crc kubenswrapper[4982]: I1205 19:34:32.286297 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" podStartSLOduration=6.286283321 podStartE2EDuration="6.286283321s" podCreationTimestamp="2025-12-05 19:34:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:32.283327696 +0000 UTC m=+1251.165213721" watchObservedRunningTime="2025-12-05 19:34:32.286283321 +0000 UTC m=+1251.168169316" Dec 05 19:34:33 crc kubenswrapper[4982]: I1205 19:34:33.274501 4982 generic.go:334] "Generic (PLEG): container finished" podID="a404304e-f5bf-4931-8577-9161a96cfd8d" containerID="7cb1483c9751a0a187666105343480f7cbf8fdb1ccd8301135c2b226a4eaa9e2" exitCode=0 Dec 05 19:34:33 crc kubenswrapper[4982]: I1205 19:34:33.274608 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jbfm4" event={"ID":"a404304e-f5bf-4931-8577-9161a96cfd8d","Type":"ContainerDied","Data":"7cb1483c9751a0a187666105343480f7cbf8fdb1ccd8301135c2b226a4eaa9e2"} Dec 05 19:34:33 crc kubenswrapper[4982]: I1205 19:34:33.407583 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="b3825032-58f5-4494-ba6f-984b83186c7b" path="/var/lib/kubelet/pods/b3825032-58f5-4494-ba6f-984b83186c7b/volumes" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.182521 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.193657 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data\") pod \"a404304e-f5bf-4931-8577-9161a96cfd8d\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.193697 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle\") pod \"a404304e-f5bf-4931-8577-9161a96cfd8d\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.193738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data\") pod \"a404304e-f5bf-4931-8577-9161a96cfd8d\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.193878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bb4jj\" (UniqueName: \"kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj\") pod \"a404304e-f5bf-4931-8577-9161a96cfd8d\" (UID: \"a404304e-f5bf-4931-8577-9161a96cfd8d\") " Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.202607 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj" (OuterVolumeSpecName: "kube-api-access-bb4jj") pod "a404304e-f5bf-4931-8577-9161a96cfd8d" (UID: "a404304e-f5bf-4931-8577-9161a96cfd8d"). InnerVolumeSpecName "kube-api-access-bb4jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.212183 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "a404304e-f5bf-4931-8577-9161a96cfd8d" (UID: "a404304e-f5bf-4931-8577-9161a96cfd8d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.243980 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a404304e-f5bf-4931-8577-9161a96cfd8d" (UID: "a404304e-f5bf-4931-8577-9161a96cfd8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.254653 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data" (OuterVolumeSpecName: "config-data") pod "a404304e-f5bf-4931-8577-9161a96cfd8d" (UID: "a404304e-f5bf-4931-8577-9161a96cfd8d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.295641 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bb4jj\" (UniqueName: \"kubernetes.io/projected/a404304e-f5bf-4931-8577-9161a96cfd8d-kube-api-access-bb4jj\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.295698 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.295708 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.295716 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/a404304e-f5bf-4931-8577-9161a96cfd8d-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.297049 4982 generic.go:334] "Generic (PLEG): container finished" podID="666982dd-eefa-464c-ae19-7b0ffcabcf07" containerID="48c7db6740b1555fdbe7f1ad0652f97e3636d9a90b73b489e4c3a495e3f0fabd" exitCode=0 Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.297112 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dgbrk" event={"ID":"666982dd-eefa-464c-ae19-7b0ffcabcf07","Type":"ContainerDied","Data":"48c7db6740b1555fdbe7f1ad0652f97e3636d9a90b73b489e4c3a495e3f0fabd"} Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.305626 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-jbfm4" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.305772 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jbfm4" event={"ID":"a404304e-f5bf-4931-8577-9161a96cfd8d","Type":"ContainerDied","Data":"1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d"} Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.305801 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a2241f85519afb205feeafd9177ebc861d0c3fad76788a063def5e6ff80d57d" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.740491 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.740941 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="dnsmasq-dns" containerID="cri-o://8ef716c915abf9a1a3d7d4d21d66fa2bbfa557383069b295d882b8f94e4a9c2c" gracePeriod=10 Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.832550 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833005 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d61674f-fb7a-4bab-90ad-e4b6c22693ee" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833028 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d61674f-fb7a-4bab-90ad-e4b6c22693ee" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833051 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="505849cd-c790-4007-bea2-c6fef9b2fba4" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833060 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="505849cd-c790-4007-bea2-c6fef9b2fba4" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833073 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bae30fe-731a-4097-a55f-11dd857f5986" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833079 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bae30fe-731a-4097-a55f-11dd857f5986" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833101 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af444583-8979-46e6-adc3-83cc5d6fbbcf" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833108 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="af444583-8979-46e6-adc3-83cc5d6fbbcf" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833117 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31ed0f9e-af97-4842-8cbf-736e4d9d9a68" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833125 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="31ed0f9e-af97-4842-8cbf-736e4d9d9a68" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833141 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" 
containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833192 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833217 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a404304e-f5bf-4931-8577-9161a96cfd8d" containerName="glance-db-sync" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833225 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a404304e-f5bf-4931-8577-9161a96cfd8d" containerName="glance-db-sync" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833237 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7323378c-0ffc-47fa-88a2-666178163a52" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833246 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7323378c-0ffc-47fa-88a2-666178163a52" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833263 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3825032-58f5-4494-ba6f-984b83186c7b" containerName="ovn-config" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833271 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3825032-58f5-4494-ba6f-984b83186c7b" containerName="ovn-config" Dec 05 19:34:35 crc kubenswrapper[4982]: E1205 19:34:35.833285 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="434a5cc4-eda4-40f7-a763-c4c61ba909fb" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833293 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="434a5cc4-eda4-40f7-a763-c4c61ba909fb" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833499 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833525 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d61674f-fb7a-4bab-90ad-e4b6c22693ee" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833539 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bae30fe-731a-4097-a55f-11dd857f5986" containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833558 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3825032-58f5-4494-ba6f-984b83186c7b" containerName="ovn-config" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833575 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="505849cd-c790-4007-bea2-c6fef9b2fba4" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833593 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a404304e-f5bf-4931-8577-9161a96cfd8d" containerName="glance-db-sync" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833607 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="31ed0f9e-af97-4842-8cbf-736e4d9d9a68" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833617 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7323378c-0ffc-47fa-88a2-666178163a52" 
containerName="mariadb-account-create-update" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833630 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="af444583-8979-46e6-adc3-83cc5d6fbbcf" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.833640 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="434a5cc4-eda4-40f7-a763-c4c61ba909fb" containerName="mariadb-database-create" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.834875 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.877377 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945360 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74tzr\" (UniqueName: \"kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945387 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945411 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945448 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:35 crc kubenswrapper[4982]: I1205 19:34:35.945541 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047205 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") 
" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047333 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74tzr\" (UniqueName: \"kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047351 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.047394 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.048360 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.048385 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.049735 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.049759 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.051071 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.063655 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74tzr\" (UniqueName: \"kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr\") pod \"dnsmasq-dns-895cf5cf-6rm75\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.155369 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.329999 4982 generic.go:334] "Generic (PLEG): container finished" podID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerID="8ef716c915abf9a1a3d7d4d21d66fa2bbfa557383069b295d882b8f94e4a9c2c" exitCode=0 Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.330091 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" event={"ID":"6572d0e2-18b4-4e4e-8671-1e897b89dcdd","Type":"ContainerDied","Data":"8ef716c915abf9a1a3d7d4d21d66fa2bbfa557383069b295d882b8f94e4a9c2c"} Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.643042 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.777988 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dgbrk" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.822621 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.863653 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.863984 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle\") pod \"666982dd-eefa-464c-ae19-7b0ffcabcf07\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864027 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data\") pod \"666982dd-eefa-464c-ae19-7b0ffcabcf07\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864048 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864106 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clvtm\" (UniqueName: \"kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm\") pod \"666982dd-eefa-464c-ae19-7b0ffcabcf07\" (UID: \"666982dd-eefa-464c-ae19-7b0ffcabcf07\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864123 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-md7n9\" (UniqueName: \"kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864201 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864260 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.864328 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0\") pod \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\" (UID: \"6572d0e2-18b4-4e4e-8671-1e897b89dcdd\") " Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.874374 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9" (OuterVolumeSpecName: "kube-api-access-md7n9") pod 
"6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "kube-api-access-md7n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.875272 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm" (OuterVolumeSpecName: "kube-api-access-clvtm") pod "666982dd-eefa-464c-ae19-7b0ffcabcf07" (UID: "666982dd-eefa-464c-ae19-7b0ffcabcf07"). InnerVolumeSpecName "kube-api-access-clvtm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.942987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "666982dd-eefa-464c-ae19-7b0ffcabcf07" (UID: "666982dd-eefa-464c-ae19-7b0ffcabcf07"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.967182 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.967218 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clvtm\" (UniqueName: \"kubernetes.io/projected/666982dd-eefa-464c-ae19-7b0ffcabcf07-kube-api-access-clvtm\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.967235 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-md7n9\" (UniqueName: \"kubernetes.io/projected/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-kube-api-access-md7n9\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.985209 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.987778 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.988199 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.991907 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config" (OuterVolumeSpecName: "config") pod "6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.993321 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data" (OuterVolumeSpecName: "config-data") pod "666982dd-eefa-464c-ae19-7b0ffcabcf07" (UID: "666982dd-eefa-464c-ae19-7b0ffcabcf07"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:36 crc kubenswrapper[4982]: I1205 19:34:36.995509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6572d0e2-18b4-4e4e-8671-1e897b89dcdd" (UID: "6572d0e2-18b4-4e4e-8671-1e897b89dcdd"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069536 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069587 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069599 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/666982dd-eefa-464c-ae19-7b0ffcabcf07-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069608 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069649 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.069662 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6572d0e2-18b4-4e4e-8671-1e897b89dcdd-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.340832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" event={"ID":"6572d0e2-18b4-4e4e-8671-1e897b89dcdd","Type":"ContainerDied","Data":"fbe381cf996b4325b2ead65e326dde10c8202c599881efb6535f0a5443a22461"} Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.340893 4982 scope.go:117] "RemoveContainer" containerID="8ef716c915abf9a1a3d7d4d21d66fa2bbfa557383069b295d882b8f94e4a9c2c" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.340903 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-2q5jw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.343504 4982 generic.go:334] "Generic (PLEG): container finished" podID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerID="75e640690f49c237d335d904f6200b9cfb921f1177d7dafcb344122b5e48b659" exitCode=0 Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.343580 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" event={"ID":"48b84229-9e2d-4f53-948b-de4d30ba18bb","Type":"ContainerDied","Data":"75e640690f49c237d335d904f6200b9cfb921f1177d7dafcb344122b5e48b659"} Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.343613 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" event={"ID":"48b84229-9e2d-4f53-948b-de4d30ba18bb","Type":"ContainerStarted","Data":"ed9be1bdd15bcc2b3822c37b2cba63d2f0c9bd177c657e866f67d22366aebf79"} Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.347698 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dgbrk" event={"ID":"666982dd-eefa-464c-ae19-7b0ffcabcf07","Type":"ContainerDied","Data":"a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1"} Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.347732 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a70148b7778dccafd4d7eb5e4a1dda32f3d69e621d7083def3bc648701b1e1a1" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.347734 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dgbrk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.371209 4982 scope.go:117] "RemoveContainer" containerID="db18d66ccfd2c3aa8f06479440109f9a50c5a0ffbb3b3d606695ff19f029075f" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.584402 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.612211 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-l9gdk"] Dec 05 19:34:37 crc kubenswrapper[4982]: E1205 19:34:37.612790 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="init" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.612858 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="init" Dec 05 19:34:37 crc kubenswrapper[4982]: E1205 19:34:37.612941 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="666982dd-eefa-464c-ae19-7b0ffcabcf07" containerName="keystone-db-sync" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.612991 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="666982dd-eefa-464c-ae19-7b0ffcabcf07" containerName="keystone-db-sync" Dec 05 19:34:37 crc kubenswrapper[4982]: E1205 19:34:37.613078 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="dnsmasq-dns" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.613126 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="dnsmasq-dns" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.613366 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" containerName="dnsmasq-dns" Dec 05 19:34:37 crc 
kubenswrapper[4982]: I1205 19:34:37.613435 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="666982dd-eefa-464c-ae19-7b0ffcabcf07" containerName="keystone-db-sync" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.614138 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.619636 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-knkd7" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.619844 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.619953 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.620065 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.620090 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.628209 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-l9gdk"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.648612 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.651919 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.676054 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.685907 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686107 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686214 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686309 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686371 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686442 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtq94\" (UniqueName: \"kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686528 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686592 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686648 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686716 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686820 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jj9wg\" (UniqueName: \"kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.686885 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.702208 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.720220 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-2q5jw"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807385 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807528 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807599 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807824 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtq94\" (UniqueName: \"kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807917 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807954 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.807997 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.808061 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.808223 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jj9wg\" (UniqueName: \"kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.808749 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.810970 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.818063 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.828958 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.829358 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.831342 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.839896 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " 
pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.846862 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.847538 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.852786 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtq94\" (UniqueName: \"kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.856278 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle\") pod \"keystone-bootstrap-l9gdk\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.867355 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-qb5jh"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.868502 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jj9wg\" (UniqueName: \"kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg\") pod \"dnsmasq-dns-6c9c9f998c-r4bfw\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.880068 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.903428 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.903661 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.903715 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-htk2q" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910183 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910247 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7qrv\" (UniqueName: \"kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910343 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.910403 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.927261 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qb5jh"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.939161 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.988035 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.997486 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-nt8vq"] Dec 05 19:34:37 crc kubenswrapper[4982]: I1205 19:34:37.998922 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014125 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014240 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014276 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014325 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014396 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7qrv\" (UniqueName: \"kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014436 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s87gj\" (UniqueName: \"kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014477 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014511 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.014542 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.015740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.026549 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.029039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.032418 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.032483 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nt8vq"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.034918 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.038292 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.038343 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kbnmd" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.038303 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.073329 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-6vvtc"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.074642 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.090598 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.090790 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4xwqm" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.092219 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6vvtc"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118308 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97stg\" (UniqueName: \"kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118369 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s87gj\" (UniqueName: \"kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118496 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.118551 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.135863 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.139532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") pod \"neutron-db-sync-nt8vq\" (UID: 
\"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.156932 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.167050 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.169956 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7qrv\" (UniqueName: \"kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv\") pod \"cinder-db-sync-qb5jh\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.189827 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.190071 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.205641 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221116 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221334 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221373 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221442 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221467 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b754\" (UniqueName: \"kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221568 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: 
I1205 19:34:38.221707 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221746 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221872 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.221897 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97stg\" (UniqueName: \"kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.225121 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.240428 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-k7wz7"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.242914 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.247532 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s87gj\" (UniqueName: \"kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj\") pod \"neutron-db-sync-nt8vq\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.257960 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.258179 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gdhhg" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.258309 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.260075 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.260192 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.289250 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97stg\" (UniqueName: \"kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg\") pod \"barbican-db-sync-6vvtc\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.308269 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-k7wz7"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325517 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325644 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325671 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325690 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b754\" (UniqueName: \"kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325820 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.325848 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjm4c\" (UniqueName: \"kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.326399 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc 
kubenswrapper[4982]: I1205 19:34:38.327510 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.333835 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.344388 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.344813 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.345350 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.363276 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.367141 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.368848 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.385542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b754\" (UniqueName: \"kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754\") pod \"ceilometer-0\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.396321 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.427568 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.427645 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjm4c\" (UniqueName: \"kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.427753 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.427961 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.428345 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.432404 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.432693 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.436677 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g85fg\" (UniqueName: \"kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.436744 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.436930 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.436987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.437025 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.437052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.451727 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.455272 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.457230 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-hf7s9"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.457574 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="dnsmasq-dns" containerID="cri-o://7c8590bc09071642a1fa0cab2e2b71c6bbbfdaada42cf4165a4fa7597870c870" gracePeriod=10 Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.458494 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.458516 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" event={"ID":"48b84229-9e2d-4f53-948b-de4d30ba18bb","Type":"ContainerStarted","Data":"7c8590bc09071642a1fa0cab2e2b71c6bbbfdaada42cf4165a4fa7597870c870"} Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.458596 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.461346 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.483759 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-hf7s9"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.484942 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.484952 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-xxdrk" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.485354 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.484945 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.485716 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.508896 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.510088 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjm4c\" (UniqueName: \"kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c\") pod \"placement-db-sync-k7wz7\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.546986 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.547066 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g85fg\" (UniqueName: \"kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.547131 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.547165 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.547179 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.547203 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.548302 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.548849 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.550407 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.550844 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.554244 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" podStartSLOduration=3.554232461 podStartE2EDuration="3.554232461s" podCreationTimestamp="2025-12-05 19:34:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:38.514770028 +0000 UTC m=+1257.396656023" watchObservedRunningTime="2025-12-05 19:34:38.554232461 +0000 UTC m=+1257.436118456" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.565802 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.580134 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-g85fg\" (UniqueName: \"kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg\") pod \"dnsmasq-dns-57c957c4ff-wg4zp\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.649543 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.651708 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.651782 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.651813 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.652063 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6m7v\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.726484 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.729109 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.731038 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-77chn" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.739492 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.739729 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.755974 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.756026 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.756048 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.756120 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6m7v\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.756176 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.757309 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.759962 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.760193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.759647 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.761125 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.792010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6m7v\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v\") pod \"cloudkitty-db-sync-hf7s9\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.808338 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7wz7" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.828204 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.829094 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.853554 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.856467 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858538 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858706 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858738 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858768 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 
19:34:38.858794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858816 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wq7n\" (UniqueName: \"kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.858843 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.867831 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.871666 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963334 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963458 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963517 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " 
pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963689 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963723 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963755 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wq7n\" (UniqueName: \"kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.963793 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.965451 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.965537 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.965587 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.965697 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw7cr\" (UniqueName: \"kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.970312 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.976974 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.977014 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/adcd8f961212518a09eddf140065c73138e877cdf387d14b4b72fd8f3cd3396b/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.977475 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.978843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.982860 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:38 crc kubenswrapper[4982]: I1205 19:34:38.992738 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.001760 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.015232 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-l9gdk"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.018501 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wq7n\" (UniqueName: \"kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067207 4982 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " pod="openstack/glance-default-external-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067252 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067318 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw7cr\" (UniqueName: \"kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067365 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067438 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067493 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.067858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.068061 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.069958 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.069991 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31401ebfd1660da8e240395dcb349960c63d08b34963a13b7abf991967d4dead/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.074333 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.074913 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.076565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.085119 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw7cr\" (UniqueName: \"kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.099376 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-qb5jh"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.110483 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.196702 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.407339 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6572d0e2-18b4-4e4e-8671-1e897b89dcdd" path="/var/lib/kubelet/pods/6572d0e2-18b4-4e4e-8671-1e897b89dcdd/volumes" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.477341 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.516026 4982 generic.go:334] "Generic (PLEG): container finished" podID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerID="7c8590bc09071642a1fa0cab2e2b71c6bbbfdaada42cf4165a4fa7597870c870" exitCode=0 Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.516283 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" event={"ID":"48b84229-9e2d-4f53-948b-de4d30ba18bb","Type":"ContainerDied","Data":"7c8590bc09071642a1fa0cab2e2b71c6bbbfdaada42cf4165a4fa7597870c870"} Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.523041 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qb5jh" event={"ID":"253ffb42-0135-4d3b-b21c-0810b4591a69","Type":"ContainerStarted","Data":"b2972c8e05a1fd06548ad3e10d8be2a7fe6d72b692c52703e7cf00bab06eae1c"} Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.527080 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-l9gdk" event={"ID":"77a05301-23c6-48b2-8351-5b02e3a0751f","Type":"ContainerStarted","Data":"7fc1f97de47a08c22fed16ed843a4c2ed84801939355a567668109857bfd5ac6"} Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.530345 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" event={"ID":"985abcdf-9c5f-4522-9b59-77d69ca8ec9a","Type":"ContainerStarted","Data":"d8944f3ecc7a841fe63aecd0fe93bc429c09e67b2d010674116eecb733db3a0c"} Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.698879 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-6vvtc"] Dec 05 19:34:39 crc kubenswrapper[4982]: W1205 19:34:39.703866 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod641d839f_9ca5_4835_ba20_2c6981a00df3.slice/crio-51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83 WatchSource:0}: Error finding container 51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83: Status 404 returned error can't find the container with id 51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83 Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.714165 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-nt8vq"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.728942 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-k7wz7"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.742976 4982 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openstack/ceilometer-0"] Dec 05 19:34:39 crc kubenswrapper[4982]: W1205 19:34:39.744362 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod36cc8bb3_de84_43c8_9b70_f14d2532598b.slice/crio-32807b1b9bd47626b97b76a69bfc4bded6d89df6a134efdc1abb3ff9dd751a80 WatchSource:0}: Error finding container 32807b1b9bd47626b97b76a69bfc4bded6d89df6a134efdc1abb3ff9dd751a80: Status 404 returned error can't find the container with id 32807b1b9bd47626b97b76a69bfc4bded6d89df6a134efdc1abb3ff9dd751a80 Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.758548 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891302 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891381 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74tzr\" (UniqueName: \"kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891481 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891606 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.891656 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb\") pod \"48b84229-9e2d-4f53-948b-de4d30ba18bb\" (UID: \"48b84229-9e2d-4f53-948b-de4d30ba18bb\") " Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.901767 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr" (OuterVolumeSpecName: "kube-api-access-74tzr") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "kube-api-access-74tzr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.905455 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.990244 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.995099 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74tzr\" (UniqueName: \"kubernetes.io/projected/48b84229-9e2d-4f53-948b-de4d30ba18bb-kube-api-access-74tzr\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:39 crc kubenswrapper[4982]: I1205 19:34:39.995120 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.024931 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.025792 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.033242 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.042618 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config" (OuterVolumeSpecName: "config") pod "48b84229-9e2d-4f53-948b-de4d30ba18bb" (UID: "48b84229-9e2d-4f53-948b-de4d30ba18bb"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.096706 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.096923 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.096932 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.096941 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/48b84229-9e2d-4f53-948b-de4d30ba18bb-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.097668 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-hf7s9"] Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.243010 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:34:40 crc kubenswrapper[4982]: W1205 19:34:40.255657 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f789c61_3a28_4597_b05b_0d9e70ad55b2.slice/crio-f783f02460f422e81947651a4a1b35eef8a76ec8953e0eac529131c5e13b2268 WatchSource:0}: Error finding container f783f02460f422e81947651a4a1b35eef8a76ec8953e0eac529131c5e13b2268: Status 404 returned error can't find the container with id f783f02460f422e81947651a4a1b35eef8a76ec8953e0eac529131c5e13b2268 Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.375708 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.558411 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-l9gdk" event={"ID":"77a05301-23c6-48b2-8351-5b02e3a0751f","Type":"ContainerStarted","Data":"a7f35ea5929175b17a3da02549a1fb7d078007dde24f98ca3dab4a52263be641"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.562170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerStarted","Data":"c1ed987370df7ccaf870832354726d074b49ad774bf72f90c52d52a48ea57779"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.562210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerStarted","Data":"dbcab1ee045056a3359d4312e0645168ad09ed0e1f5e4f7e386675189a149332"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.565604 4982 generic.go:334] "Generic (PLEG): container finished" podID="985abcdf-9c5f-4522-9b59-77d69ca8ec9a" containerID="eacb248aaf72645155c79ba06655e1816654f498b7756365349595b591e4c39e" exitCode=0 Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.565659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" 
event={"ID":"985abcdf-9c5f-4522-9b59-77d69ca8ec9a","Type":"ContainerDied","Data":"eacb248aaf72645155c79ba06655e1816654f498b7756365349595b591e4c39e"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.567696 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" event={"ID":"48b84229-9e2d-4f53-948b-de4d30ba18bb","Type":"ContainerDied","Data":"ed9be1bdd15bcc2b3822c37b2cba63d2f0c9bd177c657e866f67d22366aebf79"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.567737 4982 scope.go:117] "RemoveContainer" containerID="7c8590bc09071642a1fa0cab2e2b71c6bbbfdaada42cf4165a4fa7597870c870" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.567849 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-895cf5cf-6rm75" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.577727 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerStarted","Data":"32807b1b9bd47626b97b76a69bfc4bded6d89df6a134efdc1abb3ff9dd751a80"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.611947 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-hf7s9" event={"ID":"58e0a579-42f9-40b8-a0b4-13902c0fe8c7","Type":"ContainerStarted","Data":"9ad1e62856059f48b0af068eb4f1bd5c5ed662cd8897f91dd1aab264d5db8438"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.612646 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-l9gdk" podStartSLOduration=3.612626525 podStartE2EDuration="3.612626525s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:40.580198609 +0000 UTC m=+1259.462084604" watchObservedRunningTime="2025-12-05 19:34:40.612626525 +0000 UTC m=+1259.494512520" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.621994 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt8vq" event={"ID":"3a107703-d667-459a-862f-9ba79f86042f","Type":"ContainerStarted","Data":"d0364e692e0fe8dcc6576f50ff0a5d1b2769552586499e6cc15a11e6551b1070"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.622040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt8vq" event={"ID":"3a107703-d667-459a-862f-9ba79f86042f","Type":"ContainerStarted","Data":"2ced7be590641d988d96e9efbf9429fcb358d8ca3b36b6a946c6bde37e4948a5"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.627603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6vvtc" event={"ID":"641d839f-9ca5-4835-ba20-2c6981a00df3","Type":"ContainerStarted","Data":"51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.657660 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerStarted","Data":"f783f02460f422e81947651a4a1b35eef8a76ec8953e0eac529131c5e13b2268"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.671381 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerStarted","Data":"e04bdd14fcaa43a7bf1e2fb4626daeb89b16883837bf0e6408005dcecd235347"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.694886 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7wz7" event={"ID":"075da87a-0ab9-462b-9435-5881b90bd9a3","Type":"ContainerStarted","Data":"a2bb78c16bf9d57a52508ab64e1f92ce9c935dd238ea9248439b36d708515b07"} Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.763268 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-nt8vq" podStartSLOduration=3.763248046 podStartE2EDuration="3.763248046s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:40.709891813 +0000 UTC m=+1259.591777808" watchObservedRunningTime="2025-12-05 19:34:40.763248046 +0000 UTC m=+1259.645134041" Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.833532 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.852466 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-895cf5cf-6rm75"] Dec 05 19:34:40 crc kubenswrapper[4982]: I1205 19:34:40.988343 4982 scope.go:117] "RemoveContainer" containerID="75e640690f49c237d335d904f6200b9cfb921f1177d7dafcb344122b5e48b659" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.470184 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" path="/var/lib/kubelet/pods/48b84229-9e2d-4f53-948b-de4d30ba18bb/volumes" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.473884 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.574431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.574830 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.574892 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jj9wg\" (UniqueName: \"kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.574920 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.574980 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.575007 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config\") pod \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\" (UID: \"985abcdf-9c5f-4522-9b59-77d69ca8ec9a\") " Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.594076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg" (OuterVolumeSpecName: "kube-api-access-jj9wg") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "kube-api-access-jj9wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.617136 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.634543 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.641617 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.653726 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config" (OuterVolumeSpecName: "config") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.657086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "985abcdf-9c5f-4522-9b59-77d69ca8ec9a" (UID: "985abcdf-9c5f-4522-9b59-77d69ca8ec9a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.677987 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.678021 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jj9wg\" (UniqueName: \"kubernetes.io/projected/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-kube-api-access-jj9wg\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.678223 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.678236 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.678249 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.678311 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/985abcdf-9c5f-4522-9b59-77d69ca8ec9a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.756178 4982 generic.go:334] "Generic (PLEG): container finished" podID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerID="c1ed987370df7ccaf870832354726d074b49ad774bf72f90c52d52a48ea57779" exitCode=0 Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.756538 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerDied","Data":"c1ed987370df7ccaf870832354726d074b49ad774bf72f90c52d52a48ea57779"} Dec 05 
19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.756593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerStarted","Data":"62c345163f2b8059ac9d37944c1bfbb1d4e4df0503364852f8dbf651165506ba"} Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.756628 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.768416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" event={"ID":"985abcdf-9c5f-4522-9b59-77d69ca8ec9a","Type":"ContainerDied","Data":"d8944f3ecc7a841fe63aecd0fe93bc429c09e67b2d010674116eecb733db3a0c"} Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.768471 4982 scope.go:117] "RemoveContainer" containerID="eacb248aaf72645155c79ba06655e1816654f498b7756365349595b591e4c39e" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.768430 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6c9c9f998c-r4bfw" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.801491 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" podStartSLOduration=3.801449286 podStartE2EDuration="3.801449286s" podCreationTimestamp="2025-12-05 19:34:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:41.792315326 +0000 UTC m=+1260.674201321" watchObservedRunningTime="2025-12-05 19:34:41.801449286 +0000 UTC m=+1260.683335281" Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.859257 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:41 crc kubenswrapper[4982]: I1205 19:34:41.866589 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6c9c9f998c-r4bfw"] Dec 05 19:34:42 crc kubenswrapper[4982]: I1205 19:34:42.070217 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:34:42 crc kubenswrapper[4982]: I1205 19:34:42.095086 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:34:42 crc kubenswrapper[4982]: I1205 19:34:42.172340 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:42 crc kubenswrapper[4982]: I1205 19:34:42.812111 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerStarted","Data":"de0a6e61c8dfb118b17b8d13a57868022f48557b8f8adc72876a38b05d1c2bf2"} Dec 05 19:34:42 crc kubenswrapper[4982]: I1205 19:34:42.816902 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerStarted","Data":"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d"} Dec 05 19:34:43 crc kubenswrapper[4982]: I1205 19:34:43.405817 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="985abcdf-9c5f-4522-9b59-77d69ca8ec9a" path="/var/lib/kubelet/pods/985abcdf-9c5f-4522-9b59-77d69ca8ec9a/volumes" Dec 05 19:34:43 crc kubenswrapper[4982]: I1205 19:34:43.835692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-internal-api-0" event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerStarted","Data":"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976"} Dec 05 19:34:43 crc kubenswrapper[4982]: I1205 19:34:43.835873 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-log" containerID="cri-o://210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" gracePeriod=30 Dec 05 19:34:43 crc kubenswrapper[4982]: I1205 19:34:43.836375 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-httpd" containerID="cri-o://fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" gracePeriod=30 Dec 05 19:34:43 crc kubenswrapper[4982]: I1205 19:34:43.869315 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.869297148 podStartE2EDuration="6.869297148s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:43.860841175 +0000 UTC m=+1262.742727170" watchObservedRunningTime="2025-12-05 19:34:43.869297148 +0000 UTC m=+1262.751183143" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.817596 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.866804 4982 generic.go:334] "Generic (PLEG): container finished" podID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerID="fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" exitCode=0 Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.866838 4982 generic.go:334] "Generic (PLEG): container finished" podID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerID="210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" exitCode=143 Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.866881 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerDied","Data":"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976"} Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.866905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerDied","Data":"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d"} Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.866914 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1","Type":"ContainerDied","Data":"e04bdd14fcaa43a7bf1e2fb4626daeb89b16883837bf0e6408005dcecd235347"} Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.867034 4982 scope.go:117] "RemoveContainer" containerID="fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.867203 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.872878 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerStarted","Data":"d6e87a5f4c68b60ac572f97e881a1f80c94172957a8e7bde23ea8a074fa7ee8d"} Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.873039 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-log" containerID="cri-o://de0a6e61c8dfb118b17b8d13a57868022f48557b8f8adc72876a38b05d1c2bf2" gracePeriod=30 Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.873646 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-httpd" containerID="cri-o://d6e87a5f4c68b60ac572f97e881a1f80c94172957a8e7bde23ea8a074fa7ee8d" gracePeriod=30 Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.898853 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=7.898836819 podStartE2EDuration="7.898836819s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:34:44.893663559 +0000 UTC m=+1263.775549554" watchObservedRunningTime="2025-12-05 19:34:44.898836819 +0000 UTC m=+1263.780722814" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.945526 4982 scope.go:117] "RemoveContainer" containerID="210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970056 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970121 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970309 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970336 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970531 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" 
(UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970569 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw7cr\" (UniqueName: \"kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970598 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts\") pod \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\" (UID: \"d3137476-abcf-4f16-8d2e-0b7d4d6cceb1\") " Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970593 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.970793 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs" (OuterVolumeSpecName: "logs") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.971066 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.971084 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.971586 4982 scope.go:117] "RemoveContainer" containerID="fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" Dec 05 19:34:44 crc kubenswrapper[4982]: E1205 19:34:44.973036 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976\": container with ID starting with fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976 not found: ID does not exist" containerID="fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.973066 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976"} err="failed to get container status \"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976\": rpc error: code = NotFound desc = could not find container \"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976\": container with ID starting with fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976 not found: ID does not exist" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.973090 4982 scope.go:117] "RemoveContainer" containerID="210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" Dec 05 19:34:44 crc 
kubenswrapper[4982]: E1205 19:34:44.973620 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d\": container with ID starting with 210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d not found: ID does not exist" containerID="210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.973709 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d"} err="failed to get container status \"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d\": rpc error: code = NotFound desc = could not find container \"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d\": container with ID starting with 210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d not found: ID does not exist" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.973731 4982 scope.go:117] "RemoveContainer" containerID="fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.974563 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976"} err="failed to get container status \"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976\": rpc error: code = NotFound desc = could not find container \"fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976\": container with ID starting with fdc5413e17d8a796dc39317a1e27f72eae2f5b75e229284684207024194b7976 not found: ID does not exist" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.974608 4982 scope.go:117] "RemoveContainer" containerID="210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.974913 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d"} err="failed to get container status \"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d\": rpc error: code = NotFound desc = could not find container \"210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d\": container with ID starting with 210de3f1f2cc5a95b5aaf82a1079a01eb27a461f5b83c6f10eeb0eecdf0a3c4d not found: ID does not exist" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.976466 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts" (OuterVolumeSpecName: "scripts") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:44 crc kubenswrapper[4982]: I1205 19:34:44.989655 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr" (OuterVolumeSpecName: "kube-api-access-fw7cr") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "kube-api-access-fw7cr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.022938 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (OuterVolumeSpecName: "glance") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.026654 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.063487 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data" (OuterVolumeSpecName: "config-data") pod "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" (UID: "d3137476-abcf-4f16-8d2e-0b7d4d6cceb1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.072927 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" " Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.072970 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw7cr\" (UniqueName: \"kubernetes.io/projected/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-kube-api-access-fw7cr\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.072985 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.072996 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.073008 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.108122 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.108306 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b") on node "crc" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.175137 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" DevicePath \"\"" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.301164 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.321209 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.336874 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:45 crc kubenswrapper[4982]: E1205 19:34:45.337390 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="dnsmasq-dns" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337412 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="dnsmasq-dns" Dec 05 19:34:45 crc kubenswrapper[4982]: E1205 19:34:45.337426 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="985abcdf-9c5f-4522-9b59-77d69ca8ec9a" containerName="init" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337434 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="985abcdf-9c5f-4522-9b59-77d69ca8ec9a" containerName="init" Dec 05 19:34:45 crc kubenswrapper[4982]: E1205 19:34:45.337446 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="init" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337454 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="init" Dec 05 19:34:45 crc kubenswrapper[4982]: E1205 19:34:45.337476 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-httpd" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337485 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-httpd" Dec 05 19:34:45 crc kubenswrapper[4982]: E1205 19:34:45.337542 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-log" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337552 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-log" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337778 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="48b84229-9e2d-4f53-948b-de4d30ba18bb" containerName="dnsmasq-dns" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337799 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="985abcdf-9c5f-4522-9b59-77d69ca8ec9a" containerName="init" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337825 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-httpd" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.337839 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" containerName="glance-log" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.339138 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.342375 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.359792 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.425313 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3137476-abcf-4f16-8d2e-0b7d4d6cceb1" path="/var/lib/kubelet/pods/d3137476-abcf-4f16-8d2e-0b7d4d6cceb1/volumes" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.485630 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.485734 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.485758 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.486612 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.487033 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.487072 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 
19:34:45.487159 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmnn9\" (UniqueName: \"kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590032 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590132 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590183 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590225 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590328 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.590439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmnn9\" (UniqueName: \"kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.591542 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.591849 
4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.603402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.618386 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.618819 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmnn9\" (UniqueName: \"kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.619000 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.634970 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.635020 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31401ebfd1660da8e240395dcb349960c63d08b34963a13b7abf991967d4dead/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.694425 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.900373 4982 generic.go:334] "Generic (PLEG): container finished" podID="77a05301-23c6-48b2-8351-5b02e3a0751f" containerID="a7f35ea5929175b17a3da02549a1fb7d078007dde24f98ca3dab4a52263be641" exitCode=0 Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.900481 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-l9gdk" event={"ID":"77a05301-23c6-48b2-8351-5b02e3a0751f","Type":"ContainerDied","Data":"a7f35ea5929175b17a3da02549a1fb7d078007dde24f98ca3dab4a52263be641"} Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.912595 4982 generic.go:334] "Generic (PLEG): container finished" podID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerID="d6e87a5f4c68b60ac572f97e881a1f80c94172957a8e7bde23ea8a074fa7ee8d" exitCode=0 Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.912634 4982 generic.go:334] "Generic (PLEG): container finished" podID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerID="de0a6e61c8dfb118b17b8d13a57868022f48557b8f8adc72876a38b05d1c2bf2" exitCode=143 Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.912690 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerDied","Data":"d6e87a5f4c68b60ac572f97e881a1f80c94172957a8e7bde23ea8a074fa7ee8d"} Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.912762 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerDied","Data":"de0a6e61c8dfb118b17b8d13a57868022f48557b8f8adc72876a38b05d1c2bf2"} Dec 05 19:34:45 crc kubenswrapper[4982]: I1205 19:34:45.962618 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:34:46 crc kubenswrapper[4982]: I1205 19:34:46.233768 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:34:48 crc kubenswrapper[4982]: I1205 19:34:48.830291 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:34:48 crc kubenswrapper[4982]: I1205 19:34:48.919040 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:34:48 crc kubenswrapper[4982]: I1205 19:34:48.919368 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" containerID="cri-o://02897d28ae6fba5443272ca81447b43cd6779ffaf119577b54483eb060ebb87a" gracePeriod=10 Dec 05 19:34:49 crc kubenswrapper[4982]: I1205 19:34:49.978555 4982 generic.go:334] "Generic (PLEG): container finished" podID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerID="02897d28ae6fba5443272ca81447b43cd6779ffaf119577b54483eb060ebb87a" exitCode=0 Dec 05 19:34:49 crc kubenswrapper[4982]: I1205 19:34:49.978599 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" event={"ID":"0456e5b5-538b-4878-803d-ed6ae4d61998","Type":"ContainerDied","Data":"02897d28ae6fba5443272ca81447b43cd6779ffaf119577b54483eb060ebb87a"} Dec 05 19:34:52 crc kubenswrapper[4982]: I1205 19:34:52.614559 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Dec 05 19:34:58 crc kubenswrapper[4982]: E1205 19:34:58.228038 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Dec 05 19:34:58 crc kubenswrapper[4982]: E1205 19:34:58.228849 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jjm4c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-k7wz7_openstack(075da87a-0ab9-462b-9435-5881b90bd9a3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:34:58 crc kubenswrapper[4982]: E1205 19:34:58.230180 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-k7wz7" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" Dec 05 19:34:59 crc kubenswrapper[4982]: E1205 19:34:59.086079 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-k7wz7" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.899287 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963412 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963474 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963577 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtq94\" (UniqueName: \"kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963602 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963655 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.963682 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys\") pod \"77a05301-23c6-48b2-8351-5b02e3a0751f\" (UID: \"77a05301-23c6-48b2-8351-5b02e3a0751f\") " Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.978385 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.981391 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.981437 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts" (OuterVolumeSpecName: "scripts") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.987041 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94" (OuterVolumeSpecName: "kube-api-access-jtq94") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "kube-api-access-jtq94". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.998252 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:00 crc kubenswrapper[4982]: I1205 19:35:00.998730 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data" (OuterVolumeSpecName: "config-data") pod "77a05301-23c6-48b2-8351-5b02e3a0751f" (UID: "77a05301-23c6-48b2-8351-5b02e3a0751f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065478 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065507 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065517 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065525 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065535 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtq94\" (UniqueName: \"kubernetes.io/projected/77a05301-23c6-48b2-8351-5b02e3a0751f-kube-api-access-jtq94\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.065543 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/77a05301-23c6-48b2-8351-5b02e3a0751f-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.104554 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-l9gdk" event={"ID":"77a05301-23c6-48b2-8351-5b02e3a0751f","Type":"ContainerDied","Data":"7fc1f97de47a08c22fed16ed843a4c2ed84801939355a567668109857bfd5ac6"} Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 19:35:01.104590 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fc1f97de47a08c22fed16ed843a4c2ed84801939355a567668109857bfd5ac6" Dec 05 19:35:01 crc kubenswrapper[4982]: I1205 
19:35:01.104653 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-l9gdk" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.015262 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-l9gdk"] Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.022887 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-l9gdk"] Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.101263 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-zz5w6"] Dec 05 19:35:02 crc kubenswrapper[4982]: E1205 19:35:02.101661 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77a05301-23c6-48b2-8351-5b02e3a0751f" containerName="keystone-bootstrap" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.101683 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="77a05301-23c6-48b2-8351-5b02e3a0751f" containerName="keystone-bootstrap" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.101908 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="77a05301-23c6-48b2-8351-5b02e3a0751f" containerName="keystone-bootstrap" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.103504 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.105994 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.106045 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.106164 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.106252 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-knkd7" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.119274 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zz5w6"] Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186668 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186714 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2sxx\" (UniqueName: \"kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186848 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186908 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186945 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.186976 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.289872 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.289999 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.290077 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.290111 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.290221 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.290261 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2sxx\" (UniqueName: \"kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.298705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.298745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.298826 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.302775 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.306464 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.307854 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2sxx\" (UniqueName: \"kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx\") pod \"keystone-bootstrap-zz5w6\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.435117 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:02 crc kubenswrapper[4982]: I1205 19:35:02.621984 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Dec 05 19:35:03 crc kubenswrapper[4982]: I1205 19:35:03.407543 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77a05301-23c6-48b2-8351-5b02e3a0751f" path="/var/lib/kubelet/pods/77a05301-23c6-48b2-8351-5b02e3a0751f/volumes" Dec 05 19:35:04 crc kubenswrapper[4982]: I1205 19:35:04.135140 4982 generic.go:334] "Generic (PLEG): container finished" podID="3a107703-d667-459a-862f-9ba79f86042f" containerID="d0364e692e0fe8dcc6576f50ff0a5d1b2769552586499e6cc15a11e6551b1070" exitCode=0 Dec 05 19:35:04 crc kubenswrapper[4982]: I1205 19:35:04.135225 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt8vq" event={"ID":"3a107703-d667-459a-862f-9ba79f86042f","Type":"ContainerDied","Data":"d0364e692e0fe8dcc6576f50ff0a5d1b2769552586499e6cc15a11e6551b1070"} Dec 05 19:35:07 crc kubenswrapper[4982]: I1205 19:35:07.623279 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Dec 05 19:35:07 crc kubenswrapper[4982]: I1205 19:35:07.623845 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:35:09 crc kubenswrapper[4982]: I1205 19:35:09.112864 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:35:09 crc kubenswrapper[4982]: I1205 19:35:09.113302 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.057026 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.168465 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc\") pod \"0456e5b5-538b-4878-803d-ed6ae4d61998\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.168534 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb\") pod \"0456e5b5-538b-4878-803d-ed6ae4d61998\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.168617 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config\") pod \"0456e5b5-538b-4878-803d-ed6ae4d61998\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.168716 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb\") pod \"0456e5b5-538b-4878-803d-ed6ae4d61998\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.168847 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch7xw\" (UniqueName: \"kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw\") pod \"0456e5b5-538b-4878-803d-ed6ae4d61998\" (UID: \"0456e5b5-538b-4878-803d-ed6ae4d61998\") " Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.181730 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw" (OuterVolumeSpecName: "kube-api-access-ch7xw") pod "0456e5b5-538b-4878-803d-ed6ae4d61998" (UID: "0456e5b5-538b-4878-803d-ed6ae4d61998"). InnerVolumeSpecName "kube-api-access-ch7xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.211555 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" event={"ID":"0456e5b5-538b-4878-803d-ed6ae4d61998","Type":"ContainerDied","Data":"e2ebc6208995f7f9d43a32d89405d603a02dc339ced8d6db5d1bc96f415f6f40"} Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.211617 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.212020 4982 scope.go:117] "RemoveContainer" containerID="02897d28ae6fba5443272ca81447b43cd6779ffaf119577b54483eb060ebb87a" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.220329 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config" (OuterVolumeSpecName: "config") pod "0456e5b5-538b-4878-803d-ed6ae4d61998" (UID: "0456e5b5-538b-4878-803d-ed6ae4d61998"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.225715 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0456e5b5-538b-4878-803d-ed6ae4d61998" (UID: "0456e5b5-538b-4878-803d-ed6ae4d61998"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.229932 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0456e5b5-538b-4878-803d-ed6ae4d61998" (UID: "0456e5b5-538b-4878-803d-ed6ae4d61998"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.234307 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0456e5b5-538b-4878-803d-ed6ae4d61998" (UID: "0456e5b5-538b-4878-803d-ed6ae4d61998"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.271078 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.271122 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.271142 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch7xw\" (UniqueName: \"kubernetes.io/projected/0456e5b5-538b-4878-803d-ed6ae4d61998-kube-api-access-ch7xw\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.271219 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.271231 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0456e5b5-538b-4878-803d-ed6ae4d61998-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.545161 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:35:11 crc kubenswrapper[4982]: I1205 19:35:11.557968 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-zfdp6"] Dec 05 19:35:12 crc kubenswrapper[4982]: I1205 19:35:12.624140 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-zfdp6" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Dec 05 19:35:13 crc kubenswrapper[4982]: I1205 19:35:13.401043 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" 
path="/var/lib/kubelet/pods/0456e5b5-538b-4878-803d-ed6ae4d61998/volumes" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.851891 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.875323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.875689 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.875739 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.875832 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.875878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wq7n\" (UniqueName: \"kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.876167 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.876256 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data\") pod \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\" (UID: \"0f789c61-3a28-4597-b05b-0d9e70ad55b2\") " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.878176 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs" (OuterVolumeSpecName: "logs") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.878283 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.882989 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts" (OuterVolumeSpecName: "scripts") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.896534 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n" (OuterVolumeSpecName: "kube-api-access-4wq7n") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "kube-api-access-4wq7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.907487 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.909540 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f" (OuterVolumeSpecName: "glance") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.949881 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data" (OuterVolumeSpecName: "config-data") pod "0f789c61-3a28-4597-b05b-0d9e70ad55b2" (UID: "0f789c61-3a28-4597-b05b-0d9e70ad55b2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.977981 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978027 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978039 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f789c61-3a28-4597-b05b-0d9e70ad55b2-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978050 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978058 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wq7n\" (UniqueName: \"kubernetes.io/projected/0f789c61-3a28-4597-b05b-0d9e70ad55b2-kube-api-access-4wq7n\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978111 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") on node \"crc\" " Dec 05 19:35:16 crc kubenswrapper[4982]: I1205 19:35:16.978122 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f789c61-3a28-4597-b05b-0d9e70ad55b2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.026861 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.027285 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f") on node "crc" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.080012 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.272184 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"0f789c61-3a28-4597-b05b-0d9e70ad55b2","Type":"ContainerDied","Data":"f783f02460f422e81947651a4a1b35eef8a76ec8953e0eac529131c5e13b2268"} Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.272241 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.305271 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.311548 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.322492 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.322631 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-97stg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-6vvtc_openstack(641d839f-9ca5-4835-ba20-2c6981a00df3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.324823 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-6vvtc" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.335701 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.336636 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.336960 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="init" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.336977 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="init" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.336989 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-httpd" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.336996 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-httpd" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.337026 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337032 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.337050 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a107703-d667-459a-862f-9ba79f86042f" containerName="neutron-db-sync" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337056 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a107703-d667-459a-862f-9ba79f86042f" containerName="neutron-db-sync" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.337068 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-log" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337073 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-log" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337347 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a107703-d667-459a-862f-9ba79f86042f" containerName="neutron-db-sync" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337364 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-httpd" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.337379 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" containerName="glance-log" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.338526 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0456e5b5-538b-4878-803d-ed6ae4d61998" containerName="dnsmasq-dns" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.339686 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.350939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.356666 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.373747 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.423822 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f789c61-3a28-4597-b05b-0d9e70ad55b2" path="/var/lib/kubelet/pods/0f789c61-3a28-4597-b05b-0d9e70ad55b2/volumes" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.499760 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s87gj\" (UniqueName: \"kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj\") pod \"3a107703-d667-459a-862f-9ba79f86042f\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.499864 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") pod \"3a107703-d667-459a-862f-9ba79f86042f\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.499987 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config\") pod \"3a107703-d667-459a-862f-9ba79f86042f\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.500243 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.500268 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.500326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.500390 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 
19:35:17.501076 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.501186 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.501256 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtzvb\" (UniqueName: \"kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.501315 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.515450 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj" (OuterVolumeSpecName: "kube-api-access-s87gj") pod "3a107703-d667-459a-862f-9ba79f86042f" (UID: "3a107703-d667-459a-862f-9ba79f86042f"). InnerVolumeSpecName "kube-api-access-s87gj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:17 crc kubenswrapper[4982]: E1205 19:35:17.524730 4982 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle podName:3a107703-d667-459a-862f-9ba79f86042f nodeName:}" failed. No retries permitted until 2025-12-05 19:35:18.024704216 +0000 UTC m=+1296.906590211 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle") pod "3a107703-d667-459a-862f-9ba79f86042f" (UID: "3a107703-d667-459a-862f-9ba79f86042f") : error deleting /var/lib/kubelet/pods/3a107703-d667-459a-862f-9ba79f86042f/volume-subpaths: remove /var/lib/kubelet/pods/3a107703-d667-459a-862f-9ba79f86042f/volume-subpaths: no such file or directory Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.527070 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config" (OuterVolumeSpecName: "config") pod "3a107703-d667-459a-862f-9ba79f86042f" (UID: "3a107703-d667-459a-862f-9ba79f86042f"). InnerVolumeSpecName "config". 
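
The nestedpendingoperations error above is a benign cleanup race: the volume-subpaths directory for pod 3a107703-... was already gone, so the remove failed with "no such file or directory" and the operation was rescheduled with durationBeforeRetry 500ms (this delay doubles on repeated failures of the same operation). The retry at 19:35:18 below succeeds. The usual way to make such cleanup idempotent is to treat "already absent" as success, e.g.:

    package main

    import (
        "fmt"
        "os"
    )

    // removeIfPresent deletes path but treats "no such file or directory"
    // as success, so a racing earlier cleanup cannot fail the retry.
    func removeIfPresent(path string) error {
        if err := os.Remove(path); err != nil && !os.IsNotExist(err) {
            return err
        }
        return nil
    }

    func main() {
        // Illustrative path only; prints <nil> because the path is absent.
        fmt.Println(removeIfPresent("/tmp/example-pod/volume-subpaths"))
    }
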
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602770 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602819 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602839 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtzvb\" (UniqueName: \"kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602864 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602895 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602914 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.602945 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.603019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.603079 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s87gj\" (UniqueName: \"kubernetes.io/projected/3a107703-d667-459a-862f-9ba79f86042f-kube-api-access-s87gj\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.603091 4982 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.603514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.603843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.606935 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.606967 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/adcd8f961212518a09eddf140065c73138e877cdf387d14b4b72fd8f3cd3396b/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.607327 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.607463 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.608233 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.608720 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.621688 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtzvb\" (UniqueName: \"kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb\") pod \"glance-default-external-api-0\" (UID: 
\"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.645704 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " pod="openstack/glance-default-external-api-0" Dec 05 19:35:17 crc kubenswrapper[4982]: I1205 19:35:17.703645 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.112552 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") pod \"3a107703-d667-459a-862f-9ba79f86042f\" (UID: \"3a107703-d667-459a-862f-9ba79f86042f\") " Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.119397 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3a107703-d667-459a-862f-9ba79f86042f" (UID: "3a107703-d667-459a-862f-9ba79f86042f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.214797 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3a107703-d667-459a-862f-9ba79f86042f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.281605 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-nt8vq" event={"ID":"3a107703-d667-459a-862f-9ba79f86042f","Type":"ContainerDied","Data":"2ced7be590641d988d96e9efbf9429fcb358d8ca3b36b6a946c6bde37e4948a5"} Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.281657 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2ced7be590641d988d96e9efbf9429fcb358d8ca3b36b6a946c6bde37e4948a5" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.281658 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-nt8vq" Dec 05 19:35:18 crc kubenswrapper[4982]: E1205 19:35:18.284121 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-6vvtc" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" Dec 05 19:35:18 crc kubenswrapper[4982]: E1205 19:35:18.558372 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 19:35:18 crc kubenswrapper[4982]: E1205 19:35:18.558878 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-m7qrv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-qb5jh_openstack(253ffb42-0135-4d3b-b21c-0810b4591a69): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:35:18 crc kubenswrapper[4982]: E1205 19:35:18.560252 4982 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-qb5jh" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.617660 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"] Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.632296 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.661266 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"] Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.704609 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.706055 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.713135 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.714408 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.714611 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.714831 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-kbnmd" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.714981 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.727727 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.727798 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.727867 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cb2z2\" (UniqueName: \"kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.727889 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc 
kubenswrapper[4982]: I1205 19:35:18.727915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.727964 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829672 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829759 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829782 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829829 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829915 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829937 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2bqp\" (UniqueName: \"kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " 
pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829955 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cb2z2\" (UniqueName: \"kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829970 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.829993 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.830022 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.830859 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.830857 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.831679 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.832254 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.832707 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.861818 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cb2z2\" (UniqueName: \"kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2\") pod \"dnsmasq-dns-5ccc5c4795-6mnfx\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") " pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.932957 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.933008 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2bqp\" (UniqueName: \"kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.933032 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.933268 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.933288 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.942131 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.945919 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.950736 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2bqp\" (UniqueName: \"kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.955311 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: 
\"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.958324 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs\") pod \"neutron-65f644b6d4-7k4x9\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:18 crc kubenswrapper[4982]: I1205 19:35:18.996785 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:19 crc kubenswrapper[4982]: I1205 19:35:19.042585 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:19 crc kubenswrapper[4982]: I1205 19:35:19.179629 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:19 crc kubenswrapper[4982]: E1205 19:35:19.292732 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-qb5jh" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.737356 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-67fdf46f87-pnjnc"] Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.739477 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.742565 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.742691 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.772545 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-67fdf46f87-pnjnc"] Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.877453 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.877503 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-ovndb-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.877703 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-public-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 
19:35:20.877866 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-combined-ca-bundle\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.877920 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-internal-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.878048 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9s8x\" (UniqueName: \"kubernetes.io/projected/4afaca8a-acd1-480f-a132-33155fb3b1b0-kube-api-access-b9s8x\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.878082 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-httpd-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980257 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9s8x\" (UniqueName: \"kubernetes.io/projected/4afaca8a-acd1-480f-a132-33155fb3b1b0-kube-api-access-b9s8x\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980310 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-httpd-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980442 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-ovndb-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980517 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-public-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980578 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-combined-ca-bundle\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.980607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-internal-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.986465 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.988135 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-httpd-config\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.988167 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-combined-ca-bundle\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.988637 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-ovndb-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.989028 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-public-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:20 crc kubenswrapper[4982]: I1205 19:35:20.989126 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4afaca8a-acd1-480f-a132-33155fb3b1b0-internal-tls-certs\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:21 crc kubenswrapper[4982]: I1205 19:35:21.003843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9s8x\" (UniqueName: \"kubernetes.io/projected/4afaca8a-acd1-480f-a132-33155fb3b1b0-kube-api-access-b9s8x\") pod \"neutron-67fdf46f87-pnjnc\" (UID: \"4afaca8a-acd1-480f-a132-33155fb3b1b0\") " pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:21 crc kubenswrapper[4982]: I1205 19:35:21.064533 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:22 crc kubenswrapper[4982]: I1205 19:35:22.125041 4982 scope.go:117] "RemoveContainer" containerID="7c547d2fe211cfdd09eff3f3d9b4116cf7a8965025b7a7bff9bc3f0aca0181ff" Dec 05 19:35:22 crc kubenswrapper[4982]: W1205 19:35:22.133352 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7998d8ba_9473_4cb5_abeb_29654115491f.slice/crio-e3337a401e1471c90df2e51aaea85292ed0d83930b904496b733df114f9ee1cc WatchSource:0}: Error finding container e3337a401e1471c90df2e51aaea85292ed0d83930b904496b733df114f9ee1cc: Status 404 returned error can't find the container with id e3337a401e1471c90df2e51aaea85292ed0d83930b904496b733df114f9ee1cc Dec 05 19:35:22 crc kubenswrapper[4982]: I1205 19:35:22.333186 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerStarted","Data":"e3337a401e1471c90df2e51aaea85292ed0d83930b904496b733df114f9ee1cc"} Dec 05 19:35:22 crc kubenswrapper[4982]: E1205 19:35:22.690464 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 05 19:35:22 crc kubenswrapper[4982]: E1205 19:35:22.690714 4982 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 05 19:35:22 crc kubenswrapper[4982]: E1205 19:35:22.690826 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h6m7v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-hf7s9_openstack(58e0a579-42f9-40b8-a0b4-13902c0fe8c7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:35:22 crc kubenswrapper[4982]: E1205 19:35:22.692157 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-hf7s9" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" Dec 05 19:35:22 crc kubenswrapper[4982]: I1205 19:35:22.820615 4982 scope.go:117] "RemoveContainer" containerID="d6e87a5f4c68b60ac572f97e881a1f80c94172957a8e7bde23ea8a074fa7ee8d" Dec 05 19:35:22 crc kubenswrapper[4982]: I1205 19:35:22.919507 4982 scope.go:117] "RemoveContainer" containerID="de0a6e61c8dfb118b17b8d13a57868022f48557b8f8adc72876a38b05d1c2bf2" Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.169584 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-zz5w6"] Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.352122 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.356210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-zz5w6" event={"ID":"b33cca72-9a2b-4042-b38e-3f01396d064c","Type":"ContainerStarted","Data":"dfba08d66836f7c36cab418db175eabdffe31e374ce1748bb4f89b6f283810c4"} Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.367824 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerStarted","Data":"e2795feea1a4f33637a601a44d94fe2529c067e6de0b1673431a03a92acacf17"} Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.376544 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7wz7" event={"ID":"075da87a-0ab9-462b-9435-5881b90bd9a3","Type":"ContainerStarted","Data":"3210ea976023a897587e17fc45685184e786eb8ce297c0e13bbe66dddad4d0d9"} Dec 05 19:35:23 crc kubenswrapper[4982]: E1205 19:35:23.392300 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-hf7s9" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.405090 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-k7wz7" podStartSLOduration=3.292807247 podStartE2EDuration="46.40506988s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="2025-12-05 19:34:39.74104367 +0000 UTC m=+1258.622929665" lastFinishedPulling="2025-12-05 19:35:22.853306293 +0000 UTC m=+1301.735192298" observedRunningTime="2025-12-05 19:35:23.401523891 +0000 UTC m=+1302.283409886" watchObservedRunningTime="2025-12-05 19:35:23.40506988 +0000 UTC m=+1302.286955875" Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.458316 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.498584 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"] Dec 05 19:35:23 crc kubenswrapper[4982]: W1205 19:35:23.530946 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4afaca8a_acd1_480f_a132_33155fb3b1b0.slice/crio-42b1ba4ed14ac0194ec58099a9a1bd4b6cbacf769f7c022894e903c448ebaccc WatchSource:0}: Error finding container 42b1ba4ed14ac0194ec58099a9a1bd4b6cbacf769f7c022894e903c448ebaccc: Status 404 returned error can't find the container with id 42b1ba4ed14ac0194ec58099a9a1bd4b6cbacf769f7c022894e903c448ebaccc Dec 05 19:35:23 crc kubenswrapper[4982]: I1205 19:35:23.532111 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-67fdf46f87-pnjnc"] Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.411093 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz5w6" event={"ID":"b33cca72-9a2b-4042-b38e-3f01396d064c","Type":"ContainerStarted","Data":"da898f364e9f895b46391b48d85e9de6ac79239ad0635ebb462fad8ea60dfe57"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.443314 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-67fdf46f87-pnjnc" event={"ID":"4afaca8a-acd1-480f-a132-33155fb3b1b0","Type":"ContainerStarted","Data":"3f1a1a496f240f465e040889ffbd0d9a7800c6877542fdda6a21345b2c188fb6"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.443364 4982 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-67fdf46f87-pnjnc" event={"ID":"4afaca8a-acd1-480f-a132-33155fb3b1b0","Type":"ContainerStarted","Data":"2841a32b25c9564bb8dfc5b0d43c8fc8d9be10e154b314934a092e5581c73a8f"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.443375 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-67fdf46f87-pnjnc" event={"ID":"4afaca8a-acd1-480f-a132-33155fb3b1b0","Type":"ContainerStarted","Data":"42b1ba4ed14ac0194ec58099a9a1bd4b6cbacf769f7c022894e903c448ebaccc"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.444221 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.445738 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerStarted","Data":"e1ffacc643eb622b9190264e0d7bc7dbfa1d66d2e6532aed2f5fc811b3a0027b"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.445798 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerStarted","Data":"7cb6aceb419311c7cb9a71d859d9d79fea90712a9cce02f8d85665766f162b6e"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.447100 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerStarted","Data":"7dac6f29a657ab6ae301da630d6f9c073e3ed56c7bf90cc0d5ac96433feeaae4"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.448400 4982 generic.go:334] "Generic (PLEG): container finished" podID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerID="57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2" exitCode=0 Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.448475 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" event={"ID":"dd328ed6-3c75-4e39-9f77-3ce9629c8421","Type":"ContainerDied","Data":"57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.448496 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" event={"ID":"dd328ed6-3c75-4e39-9f77-3ce9629c8421","Type":"ContainerStarted","Data":"40cbb17511795a4cce15dad15e062cfa127fae20e8d1146df70c0f3aa5016994"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.460417 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerStarted","Data":"f9b53eccc6e3de879b9e1dbf175b2bbcfae4f700e6d90286b8eea0e91b1e496b"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.460844 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerStarted","Data":"c397eece78b8f6050c7682c23c1109a74e624a9ccd50d154078437635693a49e"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.460917 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerStarted","Data":"cde8215a6d58a31c461021b075063e60e80e2444da75f3941021275f00f809fc"} Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.461063 
4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.465725 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-zz5w6" podStartSLOduration=22.465695394 podStartE2EDuration="22.465695394s" podCreationTimestamp="2025-12-05 19:35:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:24.458291418 +0000 UTC m=+1303.340177413" watchObservedRunningTime="2025-12-05 19:35:24.465695394 +0000 UTC m=+1303.347581389" Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.489103 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-67fdf46f87-pnjnc" podStartSLOduration=4.489085493 podStartE2EDuration="4.489085493s" podCreationTimestamp="2025-12-05 19:35:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:24.488066827 +0000 UTC m=+1303.369952832" watchObservedRunningTime="2025-12-05 19:35:24.489085493 +0000 UTC m=+1303.370971488" Dec 05 19:35:24 crc kubenswrapper[4982]: I1205 19:35:24.549439 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-65f644b6d4-7k4x9" podStartSLOduration=6.549423031 podStartE2EDuration="6.549423031s" podCreationTimestamp="2025-12-05 19:35:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:24.53507081 +0000 UTC m=+1303.416956805" watchObservedRunningTime="2025-12-05 19:35:24.549423031 +0000 UTC m=+1303.431309026" Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.471444 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerStarted","Data":"73cc935b8f684877c72bc1969c15c1c851a56db75a8b48d1fdd2a567d9334e1b"} Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.478900 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerStarted","Data":"658b9b54b24dd81dcfd656166686b1b6238f26258369789dfdfa59309615f8e2"} Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.479022 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-log" containerID="cri-o://7dac6f29a657ab6ae301da630d6f9c073e3ed56c7bf90cc0d5ac96433feeaae4" gracePeriod=30 Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.479236 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-httpd" containerID="cri-o://658b9b54b24dd81dcfd656166686b1b6238f26258369789dfdfa59309615f8e2" gracePeriod=30 Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.484046 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" event={"ID":"dd328ed6-3c75-4e39-9f77-3ce9629c8421","Type":"ContainerStarted","Data":"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808"} Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.484078 4982 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.522726 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" podStartSLOduration=7.521880875 podStartE2EDuration="7.521880875s" podCreationTimestamp="2025-12-05 19:35:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:25.511796321 +0000 UTC m=+1304.393682316" watchObservedRunningTime="2025-12-05 19:35:25.521880875 +0000 UTC m=+1304.403766870" Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.526578 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=8.526565153 podStartE2EDuration="8.526565153s" podCreationTimestamp="2025-12-05 19:35:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:25.495592593 +0000 UTC m=+1304.377478588" watchObservedRunningTime="2025-12-05 19:35:25.526565153 +0000 UTC m=+1304.408451148" Dec 05 19:35:25 crc kubenswrapper[4982]: I1205 19:35:25.546995 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=40.546977396 podStartE2EDuration="40.546977396s" podCreationTimestamp="2025-12-05 19:34:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:25.536226486 +0000 UTC m=+1304.418112481" watchObservedRunningTime="2025-12-05 19:35:25.546977396 +0000 UTC m=+1304.428863391" Dec 05 19:35:26 crc kubenswrapper[4982]: I1205 19:35:26.494842 4982 generic.go:334] "Generic (PLEG): container finished" podID="7998d8ba-9473-4cb5-abeb-29654115491f" containerID="658b9b54b24dd81dcfd656166686b1b6238f26258369789dfdfa59309615f8e2" exitCode=0 Dec 05 19:35:26 crc kubenswrapper[4982]: I1205 19:35:26.495108 4982 generic.go:334] "Generic (PLEG): container finished" podID="7998d8ba-9473-4cb5-abeb-29654115491f" containerID="7dac6f29a657ab6ae301da630d6f9c073e3ed56c7bf90cc0d5ac96433feeaae4" exitCode=143 Dec 05 19:35:26 crc kubenswrapper[4982]: I1205 19:35:26.494906 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerDied","Data":"658b9b54b24dd81dcfd656166686b1b6238f26258369789dfdfa59309615f8e2"} Dec 05 19:35:26 crc kubenswrapper[4982]: I1205 19:35:26.495231 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerDied","Data":"7dac6f29a657ab6ae301da630d6f9c073e3ed56c7bf90cc0d5ac96433feeaae4"} Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.234755 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.412303 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.413334 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.413244 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.413868 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs" (OuterVolumeSpecName: "logs") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.413913 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmnn9\" (UniqueName: \"kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.414009 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.414405 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.414446 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.414930 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle\") pod \"7998d8ba-9473-4cb5-abeb-29654115491f\" (UID: \"7998d8ba-9473-4cb5-abeb-29654115491f\") " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.416127 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.416166 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7998d8ba-9473-4cb5-abeb-29654115491f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.418484 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9" (OuterVolumeSpecName: "kube-api-access-gmnn9") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "kube-api-access-gmnn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.419193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts" (OuterVolumeSpecName: "scripts") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.451312 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (OuterVolumeSpecName: "glance") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.459764 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.488879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data" (OuterVolumeSpecName: "config-data") pod "7998d8ba-9473-4cb5-abeb-29654115491f" (UID: "7998d8ba-9473-4cb5-abeb-29654115491f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.507320 4982 generic.go:334] "Generic (PLEG): container finished" podID="075da87a-0ab9-462b-9435-5881b90bd9a3" containerID="3210ea976023a897587e17fc45685184e786eb8ce297c0e13bbe66dddad4d0d9" exitCode=0 Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.507386 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7wz7" event={"ID":"075da87a-0ab9-462b-9435-5881b90bd9a3","Type":"ContainerDied","Data":"3210ea976023a897587e17fc45685184e786eb8ce297c0e13bbe66dddad4d0d9"} Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.515870 4982 generic.go:334] "Generic (PLEG): container finished" podID="b33cca72-9a2b-4042-b38e-3f01396d064c" containerID="da898f364e9f895b46391b48d85e9de6ac79239ad0635ebb462fad8ea60dfe57" exitCode=0 Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.515916 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz5w6" event={"ID":"b33cca72-9a2b-4042-b38e-3f01396d064c","Type":"ContainerDied","Data":"da898f364e9f895b46391b48d85e9de6ac79239ad0635ebb462fad8ea60dfe57"} Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.517787 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" " Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.517812 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.517824 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.517840 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmnn9\" (UniqueName: \"kubernetes.io/projected/7998d8ba-9473-4cb5-abeb-29654115491f-kube-api-access-gmnn9\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.517852 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7998d8ba-9473-4cb5-abeb-29654115491f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.520295 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerStarted","Data":"35edd4ebb2af300e8a163647342d3edd88fce611210dcd2d7df66664064ebb92"} Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.529300 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.531058 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7998d8ba-9473-4cb5-abeb-29654115491f","Type":"ContainerDied","Data":"e3337a401e1471c90df2e51aaea85292ed0d83930b904496b733df114f9ee1cc"} Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.531249 4982 scope.go:117] "RemoveContainer" containerID="658b9b54b24dd81dcfd656166686b1b6238f26258369789dfdfa59309615f8e2" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.558367 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.558554 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b") on node "crc" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.590352 4982 scope.go:117] "RemoveContainer" containerID="7dac6f29a657ab6ae301da630d6f9c073e3ed56c7bf90cc0d5ac96433feeaae4" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.619480 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.624339 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.643257 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.659504 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:27 crc kubenswrapper[4982]: E1205 19:35:27.660033 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-httpd" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.660051 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-httpd" Dec 05 19:35:27 crc kubenswrapper[4982]: E1205 19:35:27.660076 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-log" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.660084 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-log" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.660302 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-httpd" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.660320 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" containerName="glance-log" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.661554 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.663683 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.663853 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.671291 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.704202 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.704253 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.759864 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.764703 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824308 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824412 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824456 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824481 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824545 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzqxt\" (UniqueName: \"kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824700 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.824721 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.925983 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926024 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926054 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926162 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926218 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzqxt\" 
(UniqueName: \"kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926295 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.926740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.927334 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.929040 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.929070 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31401ebfd1660da8e240395dcb349960c63d08b34963a13b7abf991967d4dead/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.940041 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.943442 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.946002 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.946116 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.950126 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzqxt\" (UniqueName: \"kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.977436 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " pod="openstack/glance-default-internal-api-0" Dec 05 19:35:27 crc kubenswrapper[4982]: I1205 19:35:27.997133 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:28 crc kubenswrapper[4982]: I1205 19:35:28.544061 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:35:28 crc kubenswrapper[4982]: I1205 19:35:28.544442 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:35:28 crc kubenswrapper[4982]: I1205 19:35:28.594907 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:35:28 crc kubenswrapper[4982]: W1205 19:35:28.605599 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4e81bbea_ba1a_47f3_8b77_eaf44e3e0039.slice/crio-a8ef43f7877ed09361aa2eaa6154ce47d1d5411c0de29ec05e785edd13623197 WatchSource:0}: Error finding container a8ef43f7877ed09361aa2eaa6154ce47d1d5411c0de29ec05e785edd13623197: Status 404 returned error can't find the container with id a8ef43f7877ed09361aa2eaa6154ce47d1d5411c0de29ec05e785edd13623197 Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.066476 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7wz7" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.083453 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169066 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle\") pod \"075da87a-0ab9-462b-9435-5881b90bd9a3\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169262 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2sxx\" (UniqueName: \"kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169399 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169447 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts\") pod \"075da87a-0ab9-462b-9435-5881b90bd9a3\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169468 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169491 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169571 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jjm4c\" (UniqueName: \"kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c\") pod \"075da87a-0ab9-462b-9435-5881b90bd9a3\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169593 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts\") pod \"b33cca72-9a2b-4042-b38e-3f01396d064c\" (UID: \"b33cca72-9a2b-4042-b38e-3f01396d064c\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169663 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data\") pod \"075da87a-0ab9-462b-9435-5881b90bd9a3\" (UID: 
\"075da87a-0ab9-462b-9435-5881b90bd9a3\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.169738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs\") pod \"075da87a-0ab9-462b-9435-5881b90bd9a3\" (UID: \"075da87a-0ab9-462b-9435-5881b90bd9a3\") " Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.171294 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs" (OuterVolumeSpecName: "logs") pod "075da87a-0ab9-462b-9435-5881b90bd9a3" (UID: "075da87a-0ab9-462b-9435-5881b90bd9a3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.175601 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.179204 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts" (OuterVolumeSpecName: "scripts") pod "075da87a-0ab9-462b-9435-5881b90bd9a3" (UID: "075da87a-0ab9-462b-9435-5881b90bd9a3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.179367 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.179474 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx" (OuterVolumeSpecName: "kube-api-access-z2sxx") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "kube-api-access-z2sxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.182504 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c" (OuterVolumeSpecName: "kube-api-access-jjm4c") pod "075da87a-0ab9-462b-9435-5881b90bd9a3" (UID: "075da87a-0ab9-462b-9435-5881b90bd9a3"). InnerVolumeSpecName "kube-api-access-jjm4c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.182723 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts" (OuterVolumeSpecName: "scripts") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.224271 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "075da87a-0ab9-462b-9435-5881b90bd9a3" (UID: "075da87a-0ab9-462b-9435-5881b90bd9a3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.232361 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.233835 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data" (OuterVolumeSpecName: "config-data") pod "075da87a-0ab9-462b-9435-5881b90bd9a3" (UID: "075da87a-0ab9-462b-9435-5881b90bd9a3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.237167 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data" (OuterVolumeSpecName: "config-data") pod "b33cca72-9a2b-4042-b38e-3f01396d064c" (UID: "b33cca72-9a2b-4042-b38e-3f01396d064c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272266 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272300 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/075da87a-0ab9-462b-9435-5881b90bd9a3-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272308 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272317 4982 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272326 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2sxx\" (UniqueName: \"kubernetes.io/projected/b33cca72-9a2b-4042-b38e-3f01396d064c-kube-api-access-z2sxx\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272335 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272343 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/075da87a-0ab9-462b-9435-5881b90bd9a3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272350 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272358 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272366 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jjm4c\" (UniqueName: \"kubernetes.io/projected/075da87a-0ab9-462b-9435-5881b90bd9a3-kube-api-access-jjm4c\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.272374 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b33cca72-9a2b-4042-b38e-3f01396d064c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.402514 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7998d8ba-9473-4cb5-abeb-29654115491f" path="/var/lib/kubelet/pods/7998d8ba-9473-4cb5-abeb-29654115491f/volumes" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.571997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerStarted","Data":"2738a0bbfda3397a5d061f2c64da95ca95b1573dd45749e895f1c1ae38faa248"} Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.572288 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerStarted","Data":"a8ef43f7877ed09361aa2eaa6154ce47d1d5411c0de29ec05e785edd13623197"} Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.577961 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7wz7" event={"ID":"075da87a-0ab9-462b-9435-5881b90bd9a3","Type":"ContainerDied","Data":"a2bb78c16bf9d57a52508ab64e1f92ce9c935dd238ea9248439b36d708515b07"} Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.577997 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2bb78c16bf9d57a52508ab64e1f92ce9c935dd238ea9248439b36d708515b07" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.577977 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7wz7" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.588222 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-zz5w6" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.588644 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-zz5w6" event={"ID":"b33cca72-9a2b-4042-b38e-3f01396d064c","Type":"ContainerDied","Data":"dfba08d66836f7c36cab418db175eabdffe31e374ce1748bb4f89b6f283810c4"} Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.588665 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfba08d66836f7c36cab418db175eabdffe31e374ce1748bb4f89b6f283810c4" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.659944 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-59b8477896-ww7nl"] Dec 05 19:35:29 crc kubenswrapper[4982]: E1205 19:35:29.660363 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" containerName="placement-db-sync" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.660380 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" containerName="placement-db-sync" Dec 05 19:35:29 crc kubenswrapper[4982]: E1205 19:35:29.660411 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33cca72-9a2b-4042-b38e-3f01396d064c" containerName="keystone-bootstrap" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.660417 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33cca72-9a2b-4042-b38e-3f01396d064c" containerName="keystone-bootstrap" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.660597 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33cca72-9a2b-4042-b38e-3f01396d064c" containerName="keystone-bootstrap" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.660625 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" containerName="placement-db-sync" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.661625 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.672609 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59b8477896-ww7nl"] Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.673476 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.673661 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-gdhhg" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.673783 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.673891 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.674013 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.733344 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-768c967fc5-mm4fv"] Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.734929 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.743108 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.743574 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.743855 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-knkd7" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.744001 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.744076 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.744208 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.749640 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-768c967fc5-mm4fv"] Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-internal-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781351 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7489b\" (UniqueName: \"kubernetes.io/projected/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-kube-api-access-7489b\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-config-data\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781530 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-scripts\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781607 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-combined-ca-bundle\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-logs\") pod \"placement-59b8477896-ww7nl\" (UID: 
\"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.781705 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-public-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-credential-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883209 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-scripts\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883253 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-public-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883324 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-config-data\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-public-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883409 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-internal-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883438 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7489b\" (UniqueName: \"kubernetes.io/projected/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-kube-api-access-7489b\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883460 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-config-data\") pod \"placement-59b8477896-ww7nl\" (UID: 
\"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883475 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-fernet-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883488 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-internal-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883514 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-combined-ca-bundle\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-scripts\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883584 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-combined-ca-bundle\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883605 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-logs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.883633 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gd7n9\" (UniqueName: \"kubernetes.io/projected/87f55498-f78b-4201-9970-b393206ddabf-kube-api-access-gd7n9\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.886278 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-logs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.894827 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-internal-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " 
pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.895014 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-public-tls-certs\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.895276 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-scripts\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.895456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-config-data\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.907724 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-combined-ca-bundle\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.913660 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7489b\" (UniqueName: \"kubernetes.io/projected/c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235-kube-api-access-7489b\") pod \"placement-59b8477896-ww7nl\" (UID: \"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235\") " pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.977908 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.984989 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-config-data\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985057 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-public-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-fernet-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985182 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-internal-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985214 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-combined-ca-bundle\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985298 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gd7n9\" (UniqueName: \"kubernetes.io/projected/87f55498-f78b-4201-9970-b393206ddabf-kube-api-access-gd7n9\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985337 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-credential-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.985368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-scripts\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:29 crc kubenswrapper[4982]: I1205 19:35:29.989476 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-scripts\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.005072 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-config-data\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.005258 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-internal-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.007398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-public-tls-certs\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.014195 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-credential-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.020295 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gd7n9\" (UniqueName: \"kubernetes.io/projected/87f55498-f78b-4201-9970-b393206ddabf-kube-api-access-gd7n9\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.020569 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-fernet-keys\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.021753 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87f55498-f78b-4201-9970-b393206ddabf-combined-ca-bundle\") pod \"keystone-768c967fc5-mm4fv\" (UID: \"87f55498-f78b-4201-9970-b393206ddabf\") " pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.087410 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.621804 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerStarted","Data":"8d88d572844c2446dd092f139ccfa43054b9e61b1e06ce3394126ebd6a3b5391"} Dec 05 19:35:30 crc kubenswrapper[4982]: I1205 19:35:30.677206 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.677185482 podStartE2EDuration="3.677185482s" podCreationTimestamp="2025-12-05 19:35:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:30.671004657 +0000 UTC m=+1309.552890652" watchObservedRunningTime="2025-12-05 19:35:30.677185482 +0000 UTC m=+1309.559071477" Dec 05 19:35:34 crc kubenswrapper[4982]: I1205 19:35:34.000307 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" Dec 05 19:35:34 crc kubenswrapper[4982]: I1205 19:35:34.086680 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:35:34 crc kubenswrapper[4982]: I1205 19:35:34.086948 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="dnsmasq-dns" containerID="cri-o://62c345163f2b8059ac9d37944c1bfbb1d4e4df0503364852f8dbf651165506ba" gracePeriod=10 Dec 05 19:35:34 crc kubenswrapper[4982]: I1205 19:35:34.673802 4982 generic.go:334] "Generic (PLEG): container finished" podID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerID="62c345163f2b8059ac9d37944c1bfbb1d4e4df0503364852f8dbf651165506ba" exitCode=0 Dec 05 19:35:34 crc kubenswrapper[4982]: I1205 19:35:34.674087 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerDied","Data":"62c345163f2b8059ac9d37944c1bfbb1d4e4df0503364852f8dbf651165506ba"} Dec 05 19:35:35 crc kubenswrapper[4982]: I1205 19:35:35.116845 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 19:35:37 crc kubenswrapper[4982]: I1205 19:35:37.195416 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.002096 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.003182 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.044079 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.047748 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.539634 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.692911 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.692992 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g85fg\" (UniqueName: \"kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.693048 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.693078 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.693132 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.693196 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0\") pod \"63edd3ae-b9c8-454c-a132-9ebd454761ce\" (UID: \"63edd3ae-b9c8-454c-a132-9ebd454761ce\") " Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.698693 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg" (OuterVolumeSpecName: "kube-api-access-g85fg") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "kube-api-access-g85fg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.763218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.763189 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.772838 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config" (OuterVolumeSpecName: "config") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.796049 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.796090 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g85fg\" (UniqueName: \"kubernetes.io/projected/63edd3ae-b9c8-454c-a132-9ebd454761ce-kube-api-access-g85fg\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.796101 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.796110 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.798665 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.818351 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "63edd3ae-b9c8-454c-a132-9ebd454761ce" (UID: "63edd3ae-b9c8-454c-a132-9ebd454761ce"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.821737 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59b8477896-ww7nl"] Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.831877 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" event={"ID":"63edd3ae-b9c8-454c-a132-9ebd454761ce","Type":"ContainerDied","Data":"dbcab1ee045056a3359d4312e0645168ad09ed0e1f5e4f7e386675189a149332"} Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.831931 4982 scope.go:117] "RemoveContainer" containerID="62c345163f2b8059ac9d37944c1bfbb1d4e4df0503364852f8dbf651165506ba" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.832095 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57c957c4ff-wg4zp" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.853329 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerStarted","Data":"cd9ae4591d12ee70f2340c2ec4fc262df617daffd39eb2e48f07d64200f7a193"} Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.860043 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6vvtc" event={"ID":"641d839f-9ca5-4835-ba20-2c6981a00df3","Type":"ContainerStarted","Data":"afc1350bbe3ae2c1afb759fb8b16c3a0f2a70d5a59529d9372f5f0fd325cc0a1"} Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.866160 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.866202 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.875939 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.898982 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.899014 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/63edd3ae-b9c8-454c-a132-9ebd454761ce-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.899486 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57c957c4ff-wg4zp"] Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.902022 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-6vvtc" podStartSLOduration=3.274013734 podStartE2EDuration="1m1.902007562s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="2025-12-05 19:34:39.706396728 +0000 UTC m=+1258.588282723" lastFinishedPulling="2025-12-05 19:35:38.334390556 +0000 UTC m=+1317.216276551" observedRunningTime="2025-12-05 19:35:38.882737577 +0000 UTC m=+1317.764623582" watchObservedRunningTime="2025-12-05 19:35:38.902007562 +0000 UTC m=+1317.783893567" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.905390 4982 scope.go:117] "RemoveContainer" containerID="c1ed987370df7ccaf870832354726d074b49ad774bf72f90c52d52a48ea57779" Dec 05 19:35:38 crc kubenswrapper[4982]: I1205 19:35:38.965276 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-768c967fc5-mm4fv"] Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.407400 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" path="/var/lib/kubelet/pods/63edd3ae-b9c8-454c-a132-9ebd454761ce/volumes" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.873659 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qb5jh" event={"ID":"253ffb42-0135-4d3b-b21c-0810b4591a69","Type":"ContainerStarted","Data":"13717ee3c226504c6bde18df5a3db4eee28b8af889897da70f9a7c99dca4fbe4"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.876894 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-59b8477896-ww7nl" event={"ID":"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235","Type":"ContainerStarted","Data":"5d0995829c68866646e6fb465809d1aafbd106370327fcbfab8c505d3f0ce60f"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.877090 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59b8477896-ww7nl" event={"ID":"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235","Type":"ContainerStarted","Data":"bdd6505700b29acaa0a9f32642988b4598c175305b70cca4ade21a7b92e45a63"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.877201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59b8477896-ww7nl" event={"ID":"c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235","Type":"ContainerStarted","Data":"8572ccddb31c2313096eb1f1bb9ce955cf318368bd54db917bec226cb0238f56"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.877434 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.877468 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59b8477896-ww7nl" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.879432 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-hf7s9" event={"ID":"58e0a579-42f9-40b8-a0b4-13902c0fe8c7","Type":"ContainerStarted","Data":"3b80ddac6063bb6f87b2ba84d52d005d5acae4b4ba923c8336963ddccccc3f68"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.881027 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-768c967fc5-mm4fv" event={"ID":"87f55498-f78b-4201-9970-b393206ddabf","Type":"ContainerStarted","Data":"2e7ca486fadd55c2a1fec6652fc744a35ba3d7d35c1a014d311530bf2770b38c"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.881059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-768c967fc5-mm4fv" event={"ID":"87f55498-f78b-4201-9970-b393206ddabf","Type":"ContainerStarted","Data":"bbca541c28a9e52c491d738fb5431cf6720989b199829aaa8d06f6c53d0c89a7"} Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.881294 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-768c967fc5-mm4fv" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.890818 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-qb5jh" podStartSLOduration=3.697191423 podStartE2EDuration="1m2.890802257s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="2025-12-05 19:34:39.145214354 +0000 UTC m=+1258.027100349" lastFinishedPulling="2025-12-05 19:35:38.338825188 +0000 UTC m=+1317.220711183" observedRunningTime="2025-12-05 19:35:39.887059922 +0000 UTC m=+1318.768945907" watchObservedRunningTime="2025-12-05 19:35:39.890802257 +0000 UTC m=+1318.772688252" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.926997 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-59b8477896-ww7nl" podStartSLOduration=10.926978017 podStartE2EDuration="10.926978017s" podCreationTimestamp="2025-12-05 19:35:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:39.923800387 +0000 UTC m=+1318.805686402" watchObservedRunningTime="2025-12-05 19:35:39.926978017 +0000 UTC m=+1318.808864002" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.943803 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-hf7s9" podStartSLOduration=4.099681943 podStartE2EDuration="1m2.94378872s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="2025-12-05 19:34:40.136313898 +0000 UTC m=+1259.018199893" lastFinishedPulling="2025-12-05 19:35:38.980420675 +0000 UTC m=+1317.862306670" observedRunningTime="2025-12-05 19:35:39.938714602 +0000 UTC m=+1318.820600617" watchObservedRunningTime="2025-12-05 19:35:39.94378872 +0000 UTC m=+1318.825674715" Dec 05 19:35:39 crc kubenswrapper[4982]: I1205 19:35:39.963370 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-768c967fc5-mm4fv" podStartSLOduration=10.963352723 podStartE2EDuration="10.963352723s" podCreationTimestamp="2025-12-05 19:35:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:39.956723636 +0000 UTC m=+1318.838609631" watchObservedRunningTime="2025-12-05 19:35:39.963352723 +0000 UTC m=+1318.845238718" Dec 05 19:35:41 crc kubenswrapper[4982]: I1205 19:35:41.008885 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:41 crc kubenswrapper[4982]: I1205 19:35:41.009035 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:35:41 crc kubenswrapper[4982]: I1205 19:35:41.429044 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 19:35:41 crc kubenswrapper[4982]: I1205 19:35:41.899662 4982 generic.go:334] "Generic (PLEG): container finished" podID="641d839f-9ca5-4835-ba20-2c6981a00df3" containerID="afc1350bbe3ae2c1afb759fb8b16c3a0f2a70d5a59529d9372f5f0fd325cc0a1" exitCode=0 Dec 05 19:35:41 crc kubenswrapper[4982]: I1205 19:35:41.899891 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6vvtc" event={"ID":"641d839f-9ca5-4835-ba20-2c6981a00df3","Type":"ContainerDied","Data":"afc1350bbe3ae2c1afb759fb8b16c3a0f2a70d5a59529d9372f5f0fd325cc0a1"} Dec 05 19:35:44 crc kubenswrapper[4982]: I1205 19:35:44.944189 4982 generic.go:334] "Generic (PLEG): container finished" podID="253ffb42-0135-4d3b-b21c-0810b4591a69" containerID="13717ee3c226504c6bde18df5a3db4eee28b8af889897da70f9a7c99dca4fbe4" exitCode=0 Dec 05 19:35:44 crc kubenswrapper[4982]: I1205 19:35:44.944296 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qb5jh" event={"ID":"253ffb42-0135-4d3b-b21c-0810b4591a69","Type":"ContainerDied","Data":"13717ee3c226504c6bde18df5a3db4eee28b8af889897da70f9a7c99dca4fbe4"} Dec 05 19:35:44 crc kubenswrapper[4982]: I1205 19:35:44.946542 4982 generic.go:334] "Generic (PLEG): container finished" podID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" containerID="3b80ddac6063bb6f87b2ba84d52d005d5acae4b4ba923c8336963ddccccc3f68" exitCode=0 Dec 05 19:35:44 crc kubenswrapper[4982]: I1205 19:35:44.946583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-hf7s9" event={"ID":"58e0a579-42f9-40b8-a0b4-13902c0fe8c7","Type":"ContainerDied","Data":"3b80ddac6063bb6f87b2ba84d52d005d5acae4b4ba923c8336963ddccccc3f68"} Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.346911 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.463279 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle\") pod \"641d839f-9ca5-4835-ba20-2c6981a00df3\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.463592 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data\") pod \"641d839f-9ca5-4835-ba20-2c6981a00df3\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.463720 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-97stg\" (UniqueName: \"kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg\") pod \"641d839f-9ca5-4835-ba20-2c6981a00df3\" (UID: \"641d839f-9ca5-4835-ba20-2c6981a00df3\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.469738 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "641d839f-9ca5-4835-ba20-2c6981a00df3" (UID: "641d839f-9ca5-4835-ba20-2c6981a00df3"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.475251 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg" (OuterVolumeSpecName: "kube-api-access-97stg") pod "641d839f-9ca5-4835-ba20-2c6981a00df3" (UID: "641d839f-9ca5-4835-ba20-2c6981a00df3"). InnerVolumeSpecName "kube-api-access-97stg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.496526 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "641d839f-9ca5-4835-ba20-2c6981a00df3" (UID: "641d839f-9ca5-4835-ba20-2c6981a00df3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.566360 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.566398 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/641d839f-9ca5-4835-ba20-2c6981a00df3-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.566413 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-97stg\" (UniqueName: \"kubernetes.io/projected/641d839f-9ca5-4835-ba20-2c6981a00df3-kube-api-access-97stg\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.799272 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.807017 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.874545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs\") pod \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.874583 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts\") pod \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.874629 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.874651 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875213 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle\") pod \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875267 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6m7v\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v\") pod \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875296 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875339 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data\") pod \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\" (UID: \"58e0a579-42f9-40b8-a0b4-13902c0fe8c7\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875475 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875530 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-m7qrv\" (UniqueName: \"kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.875569 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle\") pod \"253ffb42-0135-4d3b-b21c-0810b4591a69\" (UID: \"253ffb42-0135-4d3b-b21c-0810b4591a69\") " Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.876182 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.879506 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts" (OuterVolumeSpecName: "scripts") pod "58e0a579-42f9-40b8-a0b4-13902c0fe8c7" (UID: "58e0a579-42f9-40b8-a0b4-13902c0fe8c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.879679 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.880804 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs" (OuterVolumeSpecName: "certs") pod "58e0a579-42f9-40b8-a0b4-13902c0fe8c7" (UID: "58e0a579-42f9-40b8-a0b4-13902c0fe8c7"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.880862 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv" (OuterVolumeSpecName: "kube-api-access-m7qrv") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "kube-api-access-m7qrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.881480 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v" (OuterVolumeSpecName: "kube-api-access-h6m7v") pod "58e0a579-42f9-40b8-a0b4-13902c0fe8c7" (UID: "58e0a579-42f9-40b8-a0b4-13902c0fe8c7"). InnerVolumeSpecName "kube-api-access-h6m7v". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.882618 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts" (OuterVolumeSpecName: "scripts") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.920356 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data" (OuterVolumeSpecName: "config-data") pod "58e0a579-42f9-40b8-a0b4-13902c0fe8c7" (UID: "58e0a579-42f9-40b8-a0b4-13902c0fe8c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.920804 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58e0a579-42f9-40b8-a0b4-13902c0fe8c7" (UID: "58e0a579-42f9-40b8-a0b4-13902c0fe8c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.926426 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.938028 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data" (OuterVolumeSpecName: "config-data") pod "253ffb42-0135-4d3b-b21c-0810b4591a69" (UID: "253ffb42-0135-4d3b-b21c-0810b4591a69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.964492 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-6vvtc" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.964486 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-6vvtc" event={"ID":"641d839f-9ca5-4835-ba20-2c6981a00df3","Type":"ContainerDied","Data":"51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83"} Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.964597 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51a76ae3ae48e7f961fcf967baca18e806630b5e88c75ba49b5231ab4b6b5f83" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.966288 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-qb5jh" event={"ID":"253ffb42-0135-4d3b-b21c-0810b4591a69","Type":"ContainerDied","Data":"b2972c8e05a1fd06548ad3e10d8be2a7fe6d72b692c52703e7cf00bab06eae1c"} Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.966320 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2972c8e05a1fd06548ad3e10d8be2a7fe6d72b692c52703e7cf00bab06eae1c" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.966365 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-qb5jh" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.968898 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-hf7s9" event={"ID":"58e0a579-42f9-40b8-a0b4-13902c0fe8c7","Type":"ContainerDied","Data":"9ad1e62856059f48b0af068eb4f1bd5c5ed662cd8897f91dd1aab264d5db8438"} Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.968968 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ad1e62856059f48b0af068eb4f1bd5c5ed662cd8897f91dd1aab264d5db8438" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.969078 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-hf7s9" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977709 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m7qrv\" (UniqueName: \"kubernetes.io/projected/253ffb42-0135-4d3b-b21c-0810b4591a69-kube-api-access-m7qrv\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977737 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977747 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977755 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977764 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977771 4982 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977780 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977789 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6m7v\" (UniqueName: \"kubernetes.io/projected/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-kube-api-access-h6m7v\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977797 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/253ffb42-0135-4d3b-b21c-0810b4591a69-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977805 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58e0a579-42f9-40b8-a0b4-13902c0fe8c7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:46 crc kubenswrapper[4982]: I1205 19:35:46.977815 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/253ffb42-0135-4d3b-b21c-0810b4591a69-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.121240 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-pmmkj"] Dec 05 19:35:47 crc kubenswrapper[4982]: E1205 19:35:47.122060 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="dnsmasq-dns" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122084 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="dnsmasq-dns" Dec 05 19:35:47 
crc kubenswrapper[4982]: E1205 19:35:47.122111 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="init" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122120 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="init" Dec 05 19:35:47 crc kubenswrapper[4982]: E1205 19:35:47.122142 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" containerName="barbican-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122169 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" containerName="barbican-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: E1205 19:35:47.122186 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" containerName="cloudkitty-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122194 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" containerName="cloudkitty-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: E1205 19:35:47.122207 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" containerName="cinder-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122216 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" containerName="cinder-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122440 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" containerName="cloudkitty-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122460 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="63edd3ae-b9c8-454c-a132-9ebd454761ce" containerName="dnsmasq-dns" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122478 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" containerName="barbican-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.122501 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" containerName="cinder-db-sync" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.123388 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.126473 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.126703 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.126814 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.126921 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-xxdrk" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.127178 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.134857 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-pmmkj"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.181675 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.181779 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htr8r\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.181812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.181869 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.182056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.252587 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.264847 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.271636 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.278305 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.278402 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-htk2q" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.279058 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.284231 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htr8r\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.284286 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.284345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.284395 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.284430 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.289394 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.293061 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.296010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.305845 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.313436 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.333920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htr8r\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r\") pod \"cloudkitty-storageinit-pmmkj\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386230 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386275 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386303 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gsgd\" (UniqueName: \"kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386337 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386373 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.386390 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.443673 4982 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.445196 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.445293 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.448962 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494393 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494569 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494606 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gsgd\" (UniqueName: \"kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494645 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.494728 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.497127 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.500260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc 
kubenswrapper[4982]: I1205 19:35:47.500780 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.507795 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.508036 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.536265 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gsgd\" (UniqueName: \"kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd\") pod \"cinder-scheduler-0\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") " pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.600938 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.600986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57922\" (UniqueName: \"kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.601031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.601127 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.601170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.601263 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.615221 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.625407 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.629877 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.633427 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-4xwqm" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.633599 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.641927 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.677439 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-d99845895-c6s44"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.679532 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.685156 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.695906 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-d99845895-c6s44"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703423 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57922\" (UniqueName: \"kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703498 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703529 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data-custom\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703649 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703682 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703720 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5fft\" (UniqueName: \"kubernetes.io/projected/f129356d-d3c2-4fc0-856e-2310b4c29996-kube-api-access-m5fft\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703744 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703791 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f129356d-d3c2-4fc0-856e-2310b4c29996-logs\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703838 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-combined-ca-bundle\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.703902 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.704815 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.704874 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.705317 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.705413 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.705701 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.741221 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.742688 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.743736 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.784808 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820262 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820305 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820366 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3eb20c0f-b133-4aab-a43a-22dab1ae0630-logs\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5fft\" (UniqueName: \"kubernetes.io/projected/f129356d-d3c2-4fc0-856e-2310b4c29996-kube-api-access-m5fft\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820450 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820486 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820516 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-combined-ca-bundle\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820626 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data-custom\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f129356d-d3c2-4fc0-856e-2310b4c29996-logs\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.820993 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f129356d-d3c2-4fc0-856e-2310b4c29996-logs\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821067 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821102 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821140 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-combined-ca-bundle\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821212 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821324 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data-custom\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821342 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw98z\" (UniqueName: \"kubernetes.io/projected/3eb20c0f-b133-4aab-a43a-22dab1ae0630-kube-api-access-bw98z\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821382 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.821430 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4pwb\" (UniqueName: \"kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.832365 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data-custom\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.862430 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-config-data\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.872729 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5fft\" (UniqueName: \"kubernetes.io/projected/f129356d-d3c2-4fc0-856e-2310b4c29996-kube-api-access-m5fft\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.880956 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f129356d-d3c2-4fc0-856e-2310b4c29996-combined-ca-bundle\") pod \"barbican-keystone-listener-d7dcb9f9d-rc2ld\" (UID: \"f129356d-d3c2-4fc0-856e-2310b4c29996\") " pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.889930 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57922\" (UniqueName: \"kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922\") pod \"dnsmasq-dns-8775748c9-2xss8\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.896435 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929117 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw98z\" (UniqueName: \"kubernetes.io/projected/3eb20c0f-b133-4aab-a43a-22dab1ae0630-kube-api-access-bw98z\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929205 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4pwb\" (UniqueName: \"kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb\") pod \"cinder-api-0\" (UID: 
\"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929259 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929277 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929301 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3eb20c0f-b133-4aab-a43a-22dab1ae0630-logs\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929325 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929344 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-combined-ca-bundle\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data-custom\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929385 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.929436 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.930565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/3eb20c0f-b133-4aab-a43a-22dab1ae0630-logs\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.931093 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.934794 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.955810 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data-custom\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.956327 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.957002 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-combined-ca-bundle\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.959393 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3eb20c0f-b133-4aab-a43a-22dab1ae0630-config-data\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.959853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.970443 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.984904 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.989875 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle\") pod 
\"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:47 crc kubenswrapper[4982]: I1205 19:35:47.989893 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw98z\" (UniqueName: \"kubernetes.io/projected/3eb20c0f-b133-4aab-a43a-22dab1ae0630-kube-api-access-bw98z\") pod \"barbican-worker-d99845895-c6s44\" (UID: \"3eb20c0f-b133-4aab-a43a-22dab1ae0630\") " pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.005878 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-d99845895-c6s44" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.009172 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.009905 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.043935 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4pwb\" (UniqueName: \"kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb\") pod \"cinder-api-0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " pod="openstack/cinder-api-0" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.127891 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.129961 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.130916 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerStarted","Data":"00672cb10518de2128cacb35feb5e04ed3902726f4f7202693f80494c7d340c5"} Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.131068 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-central-agent" containerID="cri-o://e2795feea1a4f33637a601a44d94fe2529c067e6de0b1673431a03a92acacf17" gracePeriod=30 Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.131363 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.131414 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="proxy-httpd" containerID="cri-o://00672cb10518de2128cacb35feb5e04ed3902726f4f7202693f80494c7d340c5" gracePeriod=30 Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.131468 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="sg-core" containerID="cri-o://cd9ae4591d12ee70f2340c2ec4fc262df617daffd39eb2e48f07d64200f7a193" gracePeriod=30 Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.131513 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-notification-agent" 
containerID="cri-o://35edd4ebb2af300e8a163647342d3edd88fce611210dcd2d7df66664064ebb92" gracePeriod=30 Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.162289 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.199253 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.230790 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.232492 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.240946 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241484 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241602 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65rm7\" (UniqueName: \"kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.241903 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.291206 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.348987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349070 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349104 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349160 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65rm7\" (UniqueName: \"kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349199 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349227 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349260 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349339 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: 
\"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349357 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnsws\" (UniqueName: \"kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.349384 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.350308 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.350348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.350386 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.350863 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.351106 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.351121 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.267014815 podStartE2EDuration="1m11.351110214s" podCreationTimestamp="2025-12-05 19:34:37 +0000 UTC" firstStartedPulling="2025-12-05 19:34:39.755829912 +0000 UTC m=+1258.637715907" lastFinishedPulling="2025-12-05 19:35:46.839925311 +0000 UTC m=+1325.721811306" observedRunningTime="2025-12-05 19:35:48.30565265 +0000 UTC m=+1327.187538645" watchObservedRunningTime="2025-12-05 19:35:48.351110214 +0000 UTC m=+1327.232996209" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.389098 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65rm7\" 
(UniqueName: \"kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7\") pod \"dnsmasq-dns-6bb4fc677f-z88c6\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.431122 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-pmmkj"] Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.450926 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnsws\" (UniqueName: \"kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.451272 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.451344 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.451405 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.451459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.462631 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.470471 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.476525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.478883 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.484782 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnsws\" (UniqueName: \"kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws\") pod \"barbican-api-76c99c8f46-qgbnz\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.660662 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.740654 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:48 crc kubenswrapper[4982]: I1205 19:35:48.758775 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.083905 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.158678 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pmmkj" event={"ID":"a9f9181c-b863-4744-bc40-5fd51918c5bd","Type":"ContainerStarted","Data":"067f9846581151dbaccf46ac8037faee9bc2b9c23cf7870ad046b11ad91d2592"} Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.161758 4982 generic.go:334] "Generic (PLEG): container finished" podID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerID="00672cb10518de2128cacb35feb5e04ed3902726f4f7202693f80494c7d340c5" exitCode=0 Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.161785 4982 generic.go:334] "Generic (PLEG): container finished" podID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerID="cd9ae4591d12ee70f2340c2ec4fc262df617daffd39eb2e48f07d64200f7a193" exitCode=2 Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.161815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerDied","Data":"00672cb10518de2128cacb35feb5e04ed3902726f4f7202693f80494c7d340c5"} Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.161833 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerDied","Data":"cd9ae4591d12ee70f2340c2ec4fc262df617daffd39eb2e48f07d64200f7a193"} Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.169125 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerStarted","Data":"013f8bfe1d3e27b65034dc20436edb118687920fba80034384c105a601ab387a"} Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.361532 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.369662 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld"] Dec 05 19:35:49 crc kubenswrapper[4982]: W1205 19:35:49.382935 4982 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf129356d_d3c2_4fc0_856e_2310b4c29996.slice/crio-289f6a51984119144e411a27d2e3c4f36b76418aebd3a5534df6926177bdd0ad WatchSource:0}: Error finding container 289f6a51984119144e411a27d2e3c4f36b76418aebd3a5534df6926177bdd0ad: Status 404 returned error can't find the container with id 289f6a51984119144e411a27d2e3c4f36b76418aebd3a5534df6926177bdd0ad Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.388458 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.429455 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-d99845895-c6s44"] Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.520929 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.615174 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:35:49 crc kubenswrapper[4982]: W1205 19:35:49.617367 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7e6b4b5b_5a7d_4661_a9b2_4fa91135de2e.slice/crio-4020997986cd07096a7c9013a3d6089c379be53d37c35927d6a9c2f8ff34c890 WatchSource:0}: Error finding container 4020997986cd07096a7c9013a3d6089c379be53d37c35927d6a9c2f8ff34c890: Status 404 returned error can't find the container with id 4020997986cd07096a7c9013a3d6089c379be53d37c35927d6a9c2f8ff34c890 Dec 05 19:35:49 crc kubenswrapper[4982]: I1205 19:35:49.661069 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"] Dec 05 19:35:49 crc kubenswrapper[4982]: W1205 19:35:49.670009 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8d03d5f4_36a5_4770_accd_9e04ec44fa75.slice/crio-b347f53fdfaf7adffbcb878beaaf3ee9ebff96c7d45da706a6187733c18318b9 WatchSource:0}: Error finding container b347f53fdfaf7adffbcb878beaaf3ee9ebff96c7d45da706a6187733c18318b9: Status 404 returned error can't find the container with id b347f53fdfaf7adffbcb878beaaf3ee9ebff96c7d45da706a6187733c18318b9 Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.230583 4982 generic.go:334] "Generic (PLEG): container finished" podID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerID="35edd4ebb2af300e8a163647342d3edd88fce611210dcd2d7df66664064ebb92" exitCode=0 Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.231109 4982 generic.go:334] "Generic (PLEG): container finished" podID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerID="e2795feea1a4f33637a601a44d94fe2529c067e6de0b1673431a03a92acacf17" exitCode=0 Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.230663 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerDied","Data":"35edd4ebb2af300e8a163647342d3edd88fce611210dcd2d7df66664064ebb92"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.231200 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerDied","Data":"e2795feea1a4f33637a601a44d94fe2529c067e6de0b1673431a03a92acacf17"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.234918 4982 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerStarted","Data":"4020997986cd07096a7c9013a3d6089c379be53d37c35927d6a9c2f8ff34c890"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.239289 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerStarted","Data":"d36cb4691f4d123252e2b5b03eaa3cd8475dea84a4c234553bd69ba2b75ce42a"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.292655 4982 generic.go:334] "Generic (PLEG): container finished" podID="94eef6ce-3854-4caf-96cf-e70a1b9bb392" containerID="89bdb02305e4e2ece7fabec70890b5f4bc3e0509a72780fe2245b57794a47df8" exitCode=0 Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.292722 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8775748c9-2xss8" event={"ID":"94eef6ce-3854-4caf-96cf-e70a1b9bb392","Type":"ContainerDied","Data":"89bdb02305e4e2ece7fabec70890b5f4bc3e0509a72780fe2245b57794a47df8"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.292749 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8775748c9-2xss8" event={"ID":"94eef6ce-3854-4caf-96cf-e70a1b9bb392","Type":"ContainerStarted","Data":"c7a3ee4c2a919e112fa92c954333e25ef35dd28fb3ded506f442f8961f8cb39d"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.300609 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d99845895-c6s44" event={"ID":"3eb20c0f-b133-4aab-a43a-22dab1ae0630","Type":"ContainerStarted","Data":"34ec3099ba059f9bca9c7dd74d02d55191eea2fd80439c7591caea634946c6da"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.302267 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" event={"ID":"f129356d-d3c2-4fc0-856e-2310b4c29996","Type":"ContainerStarted","Data":"289f6a51984119144e411a27d2e3c4f36b76418aebd3a5534df6926177bdd0ad"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.321638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pmmkj" event={"ID":"a9f9181c-b863-4744-bc40-5fd51918c5bd","Type":"ContainerStarted","Data":"e8a6fd8175e38f99a4a2830096119f3de1dd95038840e6da5af5ca0f93cdf1b7"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.342032 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" event={"ID":"8d03d5f4-36a5-4770-accd-9e04ec44fa75","Type":"ContainerStarted","Data":"b347f53fdfaf7adffbcb878beaaf3ee9ebff96c7d45da706a6187733c18318b9"} Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.359141 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-pmmkj" podStartSLOduration=3.35910982 podStartE2EDuration="3.35910982s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:50.356911385 +0000 UTC m=+1329.238797380" watchObservedRunningTime="2025-12-05 19:35:50.35910982 +0000 UTC m=+1329.240995815" Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.821444 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.940999 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.946733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.946906 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.946951 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.947014 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.947109 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57922\" (UniqueName: \"kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.947129 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0\") pod \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\" (UID: \"94eef6ce-3854-4caf-96cf-e70a1b9bb392\") " Dec 05 19:35:50 crc kubenswrapper[4982]: I1205 19:35:50.960490 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.003676 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922" (OuterVolumeSpecName: "kube-api-access-57922") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "kube-api-access-57922". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.060511 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57922\" (UniqueName: \"kubernetes.io/projected/94eef6ce-3854-4caf-96cf-e70a1b9bb392-kube-api-access-57922\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.093803 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-67fdf46f87-pnjnc" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.147533 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.168804 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.168936 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b754\" (UniqueName: \"kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.168990 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.169020 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.169174 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.169292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.169317 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data\") pod \"36cc8bb3-de84-43c8-9b70-f14d2532598b\" (UID: \"36cc8bb3-de84-43c8-9b70-f14d2532598b\") " Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.169890 4982 reconciler_common.go:293] "Volume detached for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.172594 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.179591 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.189822 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754" (OuterVolumeSpecName: "kube-api-access-6b754") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "kube-api-access-6b754". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.190913 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.191094 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65f644b6d4-7k4x9" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-api" containerID="cri-o://c397eece78b8f6050c7682c23c1109a74e624a9ccd50d154078437635693a49e" gracePeriod=30 Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.191591 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65f644b6d4-7k4x9" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-httpd" containerID="cri-o://f9b53eccc6e3de879b9e1dbf175b2bbcfae4f700e6d90286b8eea0e91b1e496b" gracePeriod=30 Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.192400 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts" (OuterVolumeSpecName: "scripts") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.192828 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config" (OuterVolumeSpecName: "config") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.193197 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.208509 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.260377 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "94eef6ce-3854-4caf-96cf-e70a1b9bb392" (UID: "94eef6ce-3854-4caf-96cf-e70a1b9bb392"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277552 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277775 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277833 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277884 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b754\" (UniqueName: \"kubernetes.io/projected/36cc8bb3-de84-43c8-9b70-f14d2532598b-kube-api-access-6b754\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277941 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.277999 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.278050 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/94eef6ce-3854-4caf-96cf-e70a1b9bb392-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.278099 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/36cc8bb3-de84-43c8-9b70-f14d2532598b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.278720 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.420903 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.457881 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8775748c9-2xss8" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.458944 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.479203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.540464 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerID="45ee1744244c976a8b237a997db653cb3f304e79b89593b648d6192eb33f65b7" exitCode=0 Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.543914 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.592170 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data" (OuterVolumeSpecName: "config-data") pod "36cc8bb3-de84-43c8-9b70-f14d2532598b" (UID: "36cc8bb3-de84-43c8-9b70-f14d2532598b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.648568 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36cc8bb3-de84-43c8-9b70-f14d2532598b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.699976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"36cc8bb3-de84-43c8-9b70-f14d2532598b","Type":"ContainerDied","Data":"32807b1b9bd47626b97b76a69bfc4bded6d89df6a134efdc1abb3ff9dd751a80"} Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700026 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerStarted","Data":"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd"} Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700071 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerStarted","Data":"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374"} Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700085 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8775748c9-2xss8" event={"ID":"94eef6ce-3854-4caf-96cf-e70a1b9bb392","Type":"ContainerDied","Data":"c7a3ee4c2a919e112fa92c954333e25ef35dd28fb3ded506f442f8961f8cb39d"} Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700099 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" event={"ID":"8d03d5f4-36a5-4770-accd-9e04ec44fa75","Type":"ContainerDied","Data":"45ee1744244c976a8b237a997db653cb3f304e79b89593b648d6192eb33f65b7"} Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.700171 4982 scope.go:117] "RemoveContainer" containerID="00672cb10518de2128cacb35feb5e04ed3902726f4f7202693f80494c7d340c5" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.776985 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-76c99c8f46-qgbnz" podStartSLOduration=3.776957405 podStartE2EDuration="3.776957405s" podCreationTimestamp="2025-12-05 19:35:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:51.747584585 +0000 UTC m=+1330.629470580" watchObservedRunningTime="2025-12-05 19:35:51.776957405 +0000 UTC m=+1330.658843400" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.798251 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.804775 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8775748c9-2xss8"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.818356 4982 scope.go:117] "RemoveContainer" containerID="cd9ae4591d12ee70f2340c2ec4fc262df617daffd39eb2e48f07d64200f7a193" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.878380 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.889575 4982 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/ceilometer-0"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.898667 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:35:51 crc kubenswrapper[4982]: E1205 19:35:51.899016 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94eef6ce-3854-4caf-96cf-e70a1b9bb392" containerName="init" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899032 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="94eef6ce-3854-4caf-96cf-e70a1b9bb392" containerName="init" Dec 05 19:35:51 crc kubenswrapper[4982]: E1205 19:35:51.899049 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="sg-core" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899055 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="sg-core" Dec 05 19:35:51 crc kubenswrapper[4982]: E1205 19:35:51.899083 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-notification-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899089 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-notification-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: E1205 19:35:51.899103 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-central-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899109 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-central-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: E1205 19:35:51.899120 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="proxy-httpd" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899126 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="proxy-httpd" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899318 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-notification-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899333 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="ceilometer-central-agent" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899342 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="proxy-httpd" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899350 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="94eef6ce-3854-4caf-96cf-e70a1b9bb392" containerName="init" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.899357 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" containerName="sg-core" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.901461 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.906881 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.908369 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.939462 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.944532 4982 scope.go:117] "RemoveContainer" containerID="35edd4ebb2af300e8a163647342d3edd88fce611210dcd2d7df66664064ebb92" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963549 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963574 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963620 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963637 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.963748 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.966231 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzbpj\" (UniqueName: \"kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:51 crc kubenswrapper[4982]: I1205 19:35:51.992848 4982 scope.go:117] "RemoveContainer" containerID="e2795feea1a4f33637a601a44d94fe2529c067e6de0b1673431a03a92acacf17" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 
19:35:52.026882 4982 scope.go:117] "RemoveContainer" containerID="89bdb02305e4e2ece7fabec70890b5f4bc3e0509a72780fe2245b57794a47df8" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.067641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.067695 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzbpj\" (UniqueName: \"kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.067788 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.068064 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.068086 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.068120 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.068135 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.074657 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.076915 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.082762 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data\") pod \"ceilometer-0\" (UID: 
\"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.085490 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.094854 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.096828 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzbpj\" (UniqueName: \"kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.101872 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts\") pod \"ceilometer-0\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") " pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.237717 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.560959 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerStarted","Data":"2bb45e2b1b589e00845688b9f607e1874df44370c3e20b3e80452078efc111ce"} Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.567187 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerStarted","Data":"b70c09534543aeee62cb92649d5a8da52f21534cb14caf271890201de4b95e2b"} Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.577642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" event={"ID":"8d03d5f4-36a5-4770-accd-9e04ec44fa75","Type":"ContainerStarted","Data":"f7e64dbc21336c9c6cb443ed509916b551b3d05b01dbb02bacc41d8e84059446"} Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.577938 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.584889 4982 generic.go:334] "Generic (PLEG): container finished" podID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerID="f9b53eccc6e3de879b9e1dbf175b2bbcfae4f700e6d90286b8eea0e91b1e496b" exitCode=0 Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.585458 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerDied","Data":"f9b53eccc6e3de879b9e1dbf175b2bbcfae4f700e6d90286b8eea0e91b1e496b"} Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.585496 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.630361 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" podStartSLOduration=5.630341352 podStartE2EDuration="5.630341352s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:52.599506596 +0000 UTC m=+1331.481392591" watchObservedRunningTime="2025-12-05 19:35:52.630341352 +0000 UTC m=+1331.512227347" Dec 05 19:35:52 crc kubenswrapper[4982]: I1205 19:35:52.864958 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.400394 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36cc8bb3-de84-43c8-9b70-f14d2532598b" path="/var/lib/kubelet/pods/36cc8bb3-de84-43c8-9b70-f14d2532598b/volumes" Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.402628 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94eef6ce-3854-4caf-96cf-e70a1b9bb392" path="/var/lib/kubelet/pods/94eef6ce-3854-4caf-96cf-e70a1b9bb392/volumes" Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.596610 4982 generic.go:334] "Generic (PLEG): container finished" podID="a9f9181c-b863-4744-bc40-5fd51918c5bd" containerID="e8a6fd8175e38f99a4a2830096119f3de1dd95038840e6da5af5ca0f93cdf1b7" exitCode=0 Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.596697 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pmmkj" event={"ID":"a9f9181c-b863-4744-bc40-5fd51918c5bd","Type":"ContainerDied","Data":"e8a6fd8175e38f99a4a2830096119f3de1dd95038840e6da5af5ca0f93cdf1b7"} Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.599630 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api-log" containerID="cri-o://2bb45e2b1b589e00845688b9f607e1874df44370c3e20b3e80452078efc111ce" gracePeriod=30 Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.599704 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api" containerID="cri-o://e1f83908f267c12d80fd57a54a69c4e0bae41d84e5b5e69edfa4bd0b652d62ad" gracePeriod=30 Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.601252 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerStarted","Data":"e1f83908f267c12d80fd57a54a69c4e0bae41d84e5b5e69edfa4bd0b652d62ad"} Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.601304 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.604967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerStarted","Data":"c69795baed5336c3c2d6d4fd1476205685843ffd375ea1ae8a45199921ca823d"} Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.643526 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.35526322 podStartE2EDuration="6.643510082s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="2025-12-05 19:35:48.878349584 +0000 UTC m=+1327.760235579" 
lastFinishedPulling="2025-12-05 19:35:50.166596446 +0000 UTC m=+1329.048482441" observedRunningTime="2025-12-05 19:35:53.640479595 +0000 UTC m=+1332.522365590" watchObservedRunningTime="2025-12-05 19:35:53.643510082 +0000 UTC m=+1332.525396077" Dec 05 19:35:53 crc kubenswrapper[4982]: I1205 19:35:53.663585 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.663567037 podStartE2EDuration="6.663567037s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:53.65657211 +0000 UTC m=+1332.538458105" watchObservedRunningTime="2025-12-05 19:35:53.663567037 +0000 UTC m=+1332.545453032" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.544129 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6fbc9dfdf4-kq8zv"] Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.549673 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.552117 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.552274 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.556814 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6fbc9dfdf4-kq8zv"] Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.613437 4982 generic.go:334] "Generic (PLEG): container finished" podID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerID="2bb45e2b1b589e00845688b9f607e1874df44370c3e20b3e80452078efc111ce" exitCode=143 Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.614338 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerDied","Data":"2bb45e2b1b589e00845688b9f607e1874df44370c3e20b3e80452078efc111ce"} Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624731 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-combined-ca-bundle\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624776 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-public-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624805 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data-custom\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624842 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-logs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624911 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-internal-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624946 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.624985 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx2jd\" (UniqueName: \"kubernetes.io/projected/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-kube-api-access-dx2jd\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.726668 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx2jd\" (UniqueName: \"kubernetes.io/projected/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-kube-api-access-dx2jd\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.726766 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-combined-ca-bundle\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.726790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-public-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.726824 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data-custom\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.726872 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-logs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 
19:35:54.727002 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-internal-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.727071 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.732654 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-logs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.733449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-internal-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.734497 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-combined-ca-bundle\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.736724 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-public-tls-certs\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.755030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data-custom\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.755565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-config-data\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.757115 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx2jd\" (UniqueName: \"kubernetes.io/projected/cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb-kube-api-access-dx2jd\") pod \"barbican-api-6fbc9dfdf4-kq8zv\" (UID: \"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb\") " pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:54 crc kubenswrapper[4982]: I1205 19:35:54.865964 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:35:55 crc kubenswrapper[4982]: W1205 19:35:55.324681 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf69e00a_7f8e_460a_ab56_7daaceeeef26.slice/crio-18e0393d4d7380b37988b87f8b673f976fd212f40447dbfff6169df8038d70d3 WatchSource:0}: Error finding container 18e0393d4d7380b37988b87f8b673f976fd212f40447dbfff6169df8038d70d3: Status 404 returned error can't find the container with id 18e0393d4d7380b37988b87f8b673f976fd212f40447dbfff6169df8038d70d3 Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.510204 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.544569 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data\") pod \"a9f9181c-b863-4744-bc40-5fd51918c5bd\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.544639 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts\") pod \"a9f9181c-b863-4744-bc40-5fd51918c5bd\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.544772 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle\") pod \"a9f9181c-b863-4744-bc40-5fd51918c5bd\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.544817 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs\") pod \"a9f9181c-b863-4744-bc40-5fd51918c5bd\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.544886 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htr8r\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r\") pod \"a9f9181c-b863-4744-bc40-5fd51918c5bd\" (UID: \"a9f9181c-b863-4744-bc40-5fd51918c5bd\") " Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.561460 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts" (OuterVolumeSpecName: "scripts") pod "a9f9181c-b863-4744-bc40-5fd51918c5bd" (UID: "a9f9181c-b863-4744-bc40-5fd51918c5bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.568370 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r" (OuterVolumeSpecName: "kube-api-access-htr8r") pod "a9f9181c-b863-4744-bc40-5fd51918c5bd" (UID: "a9f9181c-b863-4744-bc40-5fd51918c5bd"). InnerVolumeSpecName "kube-api-access-htr8r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.568473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs" (OuterVolumeSpecName: "certs") pod "a9f9181c-b863-4744-bc40-5fd51918c5bd" (UID: "a9f9181c-b863-4744-bc40-5fd51918c5bd"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.632393 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data" (OuterVolumeSpecName: "config-data") pod "a9f9181c-b863-4744-bc40-5fd51918c5bd" (UID: "a9f9181c-b863-4744-bc40-5fd51918c5bd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.647431 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.647622 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htr8r\" (UniqueName: \"kubernetes.io/projected/a9f9181c-b863-4744-bc40-5fd51918c5bd-kube-api-access-htr8r\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.647687 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.647742 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.649540 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a9f9181c-b863-4744-bc40-5fd51918c5bd" (UID: "a9f9181c-b863-4744-bc40-5fd51918c5bd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.706522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-pmmkj" event={"ID":"a9f9181c-b863-4744-bc40-5fd51918c5bd","Type":"ContainerDied","Data":"067f9846581151dbaccf46ac8037faee9bc2b9c23cf7870ad046b11ad91d2592"} Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.706564 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="067f9846581151dbaccf46ac8037faee9bc2b9c23cf7870ad046b11ad91d2592" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.706613 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-pmmkj" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.718610 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerStarted","Data":"18e0393d4d7380b37988b87f8b673f976fd212f40447dbfff6169df8038d70d3"} Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.721084 4982 generic.go:334] "Generic (PLEG): container finished" podID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerID="c397eece78b8f6050c7682c23c1109a74e624a9ccd50d154078437635693a49e" exitCode=0 Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.721240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerDied","Data":"c397eece78b8f6050c7682c23c1109a74e624a9ccd50d154078437635693a49e"} Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.749339 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9f9181c-b863-4744-bc40-5fd51918c5bd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.753393 4982 generic.go:334] "Generic (PLEG): container finished" podID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerID="e1f83908f267c12d80fd57a54a69c4e0bae41d84e5b5e69edfa4bd0b652d62ad" exitCode=0 Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.753685 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerDied","Data":"e1f83908f267c12d80fd57a54a69c4e0bae41d84e5b5e69edfa4bd0b652d62ad"} Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.975547 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:35:55 crc kubenswrapper[4982]: E1205 19:35:55.976545 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9f9181c-b863-4744-bc40-5fd51918c5bd" containerName="cloudkitty-storageinit" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.976567 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9f9181c-b863-4744-bc40-5fd51918c5bd" containerName="cloudkitty-storageinit" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.977090 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9f9181c-b863-4744-bc40-5fd51918c5bd" containerName="cloudkitty-storageinit" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.978213 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.982441 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.982866 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.982998 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-xxdrk" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.984696 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.988836 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 19:35:55 crc kubenswrapper[4982]: I1205 19:35:55.990604 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067279 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067330 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067366 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067416 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067430 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24w29\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.067485 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.126538 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"] Dec 05 19:35:56 crc 
kubenswrapper[4982]: I1205 19:35:56.126838 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="dnsmasq-dns" containerID="cri-o://f7e64dbc21336c9c6cb443ed509916b551b3d05b01dbb02bacc41d8e84059446" gracePeriod=10 Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.168966 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.169079 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.169100 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.169131 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.169190 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.169207 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24w29\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.171889 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"] Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.173675 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.179065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.181364 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.186301 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.186966 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.192578 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.194460 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"] Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.201030 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24w29\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29\") pod \"cloudkitty-proc-0\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.235548 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.237256 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.239951 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.249708 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271076 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271249 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271327 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcblx\" (UniqueName: \"kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271422 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271509 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271662 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271845 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc 
kubenswrapper[4982]: I1205 19:35:56.271919 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.271984 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.272058 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzp99\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.272168 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.272243 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.330294 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.373943 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374299 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374318 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcblx\" (UniqueName: \"kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374350 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374413 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374469 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374490 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374527 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374546 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzp99\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374566 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.374584 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.376379 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.376801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.377108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.377143 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.377823 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.381985 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " 
pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.383822 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.384264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.385404 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.390469 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.391876 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcblx\" (UniqueName: \"kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.393544 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") pod \"dnsmasq-dns-86d9875b97-z4jwg\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.395447 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzp99\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99\") pod \"cloudkitty-api-0\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") " pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.641758 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.657739 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.669139 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.670087 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796540 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796611 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs\") pod \"09d53fa6-9ae9-4d81-ab50-281565de9186\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796748 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config\") pod \"09d53fa6-9ae9-4d81-ab50-281565de9186\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796784 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796857 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2bqp\" (UniqueName: \"kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp\") pod \"09d53fa6-9ae9-4d81-ab50-281565de9186\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796876 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config\") pod \"09d53fa6-9ae9-4d81-ab50-281565de9186\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796904 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4pwb\" (UniqueName: \"kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796940 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.796974 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: 
\"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.797005 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id\") pod \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\" (UID: \"9f598e9b-b0ca-4b00-9d74-65466b0572f0\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.797031 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle\") pod \"09d53fa6-9ae9-4d81-ab50-281565de9186\" (UID: \"09d53fa6-9ae9-4d81-ab50-281565de9186\") " Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.800461 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs" (OuterVolumeSpecName: "logs") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.802293 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.803630 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp" (OuterVolumeSpecName: "kube-api-access-h2bqp") pod "09d53fa6-9ae9-4d81-ab50-281565de9186" (UID: "09d53fa6-9ae9-4d81-ab50-281565de9186"). InnerVolumeSpecName "kube-api-access-h2bqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.805432 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb" (OuterVolumeSpecName: "kube-api-access-m4pwb") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "kube-api-access-m4pwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.808809 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts" (OuterVolumeSpecName: "scripts") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.809520 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.870631 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "09d53fa6-9ae9-4d81-ab50-281565de9186" (UID: "09d53fa6-9ae9-4d81-ab50-281565de9186"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.881402 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerID="f7e64dbc21336c9c6cb443ed509916b551b3d05b01dbb02bacc41d8e84059446" exitCode=0 Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.881470 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" event={"ID":"8d03d5f4-36a5-4770-accd-9e04ec44fa75","Type":"ContainerDied","Data":"f7e64dbc21336c9c6cb443ed509916b551b3d05b01dbb02bacc41d8e84059446"} Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899846 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2bqp\" (UniqueName: \"kubernetes.io/projected/09d53fa6-9ae9-4d81-ab50-281565de9186-kube-api-access-h2bqp\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899875 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899885 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4pwb\" (UniqueName: \"kubernetes.io/projected/9f598e9b-b0ca-4b00-9d74-65466b0572f0-kube-api-access-m4pwb\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899894 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899903 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899911 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9f598e9b-b0ca-4b00-9d74-65466b0572f0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.899919 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f598e9b-b0ca-4b00-9d74-65466b0572f0-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.938574 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09d53fa6-9ae9-4d81-ab50-281565de9186" (UID: "09d53fa6-9ae9-4d81-ab50-281565de9186"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.940640 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-65f644b6d4-7k4x9" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.940700 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65f644b6d4-7k4x9" event={"ID":"09d53fa6-9ae9-4d81-ab50-281565de9186","Type":"ContainerDied","Data":"cde8215a6d58a31c461021b075063e60e80e2444da75f3941021275f00f809fc"} Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.940766 4982 scope.go:117] "RemoveContainer" containerID="f9b53eccc6e3de879b9e1dbf175b2bbcfae4f700e6d90286b8eea0e91b1e496b" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.969138 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:56 crc kubenswrapper[4982]: I1205 19:35:56.982407 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data" (OuterVolumeSpecName: "config-data") pod "9f598e9b-b0ca-4b00-9d74-65466b0572f0" (UID: "9f598e9b-b0ca-4b00-9d74-65466b0572f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:56.998341 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"9f598e9b-b0ca-4b00-9d74-65466b0572f0","Type":"ContainerDied","Data":"d36cb4691f4d123252e2b5b03eaa3cd8475dea84a4c234553bd69ba2b75ce42a"} Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:56.998420 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.001078 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.001113 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.001124 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f598e9b-b0ca-4b00-9d74-65466b0572f0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.036076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config" (OuterVolumeSpecName: "config") pod "09d53fa6-9ae9-4d81-ab50-281565de9186" (UID: "09d53fa6-9ae9-4d81-ab50-281565de9186"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.074782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "09d53fa6-9ae9-4d81-ab50-281565de9186" (UID: "09d53fa6-9ae9-4d81-ab50-281565de9186"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.102540 4982 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.102574 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/09d53fa6-9ae9-4d81-ab50-281565de9186-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.194351 4982 scope.go:117] "RemoveContainer" containerID="c397eece78b8f6050c7682c23c1109a74e624a9ccd50d154078437635693a49e" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.212276 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.238664 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.259207 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:57 crc kubenswrapper[4982]: E1205 19:35:57.259687 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-httpd" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.259711 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-httpd" Dec 05 19:35:57 crc kubenswrapper[4982]: E1205 19:35:57.259733 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.259741 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api" Dec 05 19:35:57 crc kubenswrapper[4982]: E1205 19:35:57.259754 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api-log" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.259762 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api-log" Dec 05 19:35:57 crc kubenswrapper[4982]: E1205 19:35:57.259787 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-api" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.259798 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-api" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.260061 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-httpd" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.260087 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.260108 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" containerName="neutron-api" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.260125 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" containerName="cinder-api-log" 
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.262436 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.267633 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.267875 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.268113 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.278704 4982 scope.go:117] "RemoveContainer" containerID="e1f83908f267c12d80fd57a54a69c4e0bae41d84e5b5e69edfa4bd0b652d62ad" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.279944 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.290093 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.301915 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-65f644b6d4-7k4x9"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9n2h\" (UniqueName: \"kubernetes.io/projected/53a46b29-95f9-43a2-8d2a-770693317314-kube-api-access-x9n2h\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306403 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a46b29-95f9-43a2-8d2a-770693317314-logs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306423 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/53a46b29-95f9-43a2-8d2a-770693317314-etc-machine-id\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306478 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-public-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-scripts\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306576 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " 
pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306594 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data-custom\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306629 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.306646 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.326410 4982 scope.go:117] "RemoveContainer" containerID="2bb45e2b1b589e00845688b9f607e1874df44370c3e20b3e80452078efc111ce" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.339603 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6fbc9dfdf4-kq8zv"] Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.398197 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.408796 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.408833 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.408939 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9n2h\" (UniqueName: \"kubernetes.io/projected/53a46b29-95f9-43a2-8d2a-770693317314-kube-api-access-x9n2h\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.408995 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a46b29-95f9-43a2-8d2a-770693317314-logs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.409012 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/53a46b29-95f9-43a2-8d2a-770693317314-etc-machine-id\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.409096 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-public-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.409212 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-scripts\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.409275 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.409293 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data-custom\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.410874 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/53a46b29-95f9-43a2-8d2a-770693317314-logs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.415040 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.415585 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/53a46b29-95f9-43a2-8d2a-770693317314-etc-machine-id\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.416330 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.417972 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-scripts\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.419564 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-config-data-custom\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.423688 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-public-tls-certs\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.432793 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09d53fa6-9ae9-4d81-ab50-281565de9186" path="/var/lib/kubelet/pods/09d53fa6-9ae9-4d81-ab50-281565de9186/volumes" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.433640 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f598e9b-b0ca-4b00-9d74-65466b0572f0" path="/var/lib/kubelet/pods/9f598e9b-b0ca-4b00-9d74-65466b0572f0/volumes" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.434922 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9n2h\" (UniqueName: \"kubernetes.io/projected/53a46b29-95f9-43a2-8d2a-770693317314-kube-api-access-x9n2h\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.455957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53a46b29-95f9-43a2-8d2a-770693317314-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"53a46b29-95f9-43a2-8d2a-770693317314\") " pod="openstack/cinder-api-0" Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.515960 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.516097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.516125 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65rm7\" (UniqueName: \"kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.516166 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.516206 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.516320 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config\") pod \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\" (UID: \"8d03d5f4-36a5-4770-accd-9e04ec44fa75\") " Dec 
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.541236 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7" (OuterVolumeSpecName: "kube-api-access-65rm7") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "kube-api-access-65rm7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.542265 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.605931 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.607780 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.619784 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.619810 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65rm7\" (UniqueName: \"kubernetes.io/projected/8d03d5f4-36a5-4770-accd-9e04ec44fa75-kube-api-access-65rm7\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.623179 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config" (OuterVolumeSpecName: "config") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.639342 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.654542 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"]
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.664368 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:35:57 crc kubenswrapper[4982]: W1205 19:35:57.683042 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd64e387d_a88a_4b3e_ad6c_b7660aa61ee2.slice/crio-dc14543fdd1cfa237c1e34e4a3f35ec4fa4217c4ba296436b98e0e78fa59fc6d WatchSource:0}: Error finding container dc14543fdd1cfa237c1e34e4a3f35ec4fa4217c4ba296436b98e0e78fa59fc6d: Status 404 returned error can't find the container with id dc14543fdd1cfa237c1e34e4a3f35ec4fa4217c4ba296436b98e0e78fa59fc6d
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.722906 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-config\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.722936 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.743254 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.795745 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:35:57 crc kubenswrapper[4982]: I1205 19:35:57.825790 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.072805 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8d03d5f4-36a5-4770-accd-9e04ec44fa75" (UID: "8d03d5f4-36a5-4770-accd-9e04ec44fa75"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.082206 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.082328 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" event={"ID":"ba94fb6b-e425-465d-a56d-227a4f96c75a","Type":"ContainerStarted","Data":"ac68896e67bfcbdb6adae52641e11f33d0984a85384c5242b860573bed6173d7"}
Dec 05 19:35:58 crc kubenswrapper[4982]: W1205 19:35:58.109452 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53a46b29_95f9_43a2_8d2a_770693317314.slice/crio-8cba191380aea05d6ef7bd203d47c77ae319653371371535bf64fba56e565408 WatchSource:0}: Error finding container 8cba191380aea05d6ef7bd203d47c77ae319653371371535bf64fba56e565408: Status 404 returned error can't find the container with id 8cba191380aea05d6ef7bd203d47c77ae319653371371535bf64fba56e565408
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.137371 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" event={"ID":"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb","Type":"ContainerStarted","Data":"005bd097306362bf4fdbc4d5038170386c8daece7150382cac0dd67fbcc9136d"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.152428 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d99845895-c6s44" event={"ID":"3eb20c0f-b133-4aab-a43a-22dab1ae0630","Type":"ContainerStarted","Data":"bcad86ab94477fabc6122a5c8e5e5f0b01feec04742ccdd2234c787ef96ea323"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.161772 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8d03d5f4-36a5-4770-accd-9e04ec44fa75-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.171386 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" event={"ID":"f129356d-d3c2-4fc0-856e-2310b4c29996","Type":"ContainerStarted","Data":"453906d36560e5ce707459d9326caa82a615a7e2cad50af6bddd0dd53ebf53f2"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.186967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6" event={"ID":"8d03d5f4-36a5-4770-accd-9e04ec44fa75","Type":"ContainerDied","Data":"b347f53fdfaf7adffbcb878beaaf3ee9ebff96c7d45da706a6187733c18318b9"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.187020 4982 scope.go:117] "RemoveContainer" containerID="f7e64dbc21336c9c6cb443ed509916b551b3d05b01dbb02bacc41d8e84059446"
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.187167 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-z88c6"
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.218502 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aaa8ba1d-a5ea-4c3d-b020-20803817bf62","Type":"ContainerStarted","Data":"c406118f061f8ee6e2f6e9901adc9d5869f9714791794fcc1c0f7769d1c7084c"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.227490 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerStarted","Data":"06bff5795068f07446af0e61bdb956ec7862b380c928c2134e0fe34e61e49e5f"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.241276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerStarted","Data":"dc14543fdd1cfa237c1e34e4a3f35ec4fa4217c4ba296436b98e0e78fa59fc6d"}
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.255886 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0"
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.325239 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.543186 4982 scope.go:117] "RemoveContainer" containerID="45ee1744244c976a8b237a997db653cb3f304e79b89593b648d6192eb33f65b7"
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.545468 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"]
Dec 05 19:35:58 crc kubenswrapper[4982]: I1205 19:35:58.553785 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-z88c6"]
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.127577 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.261565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"53a46b29-95f9-43a2-8d2a-770693317314","Type":"ContainerStarted","Data":"8cba191380aea05d6ef7bd203d47c77ae319653371371535bf64fba56e565408"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.279662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" event={"ID":"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb","Type":"ContainerStarted","Data":"fd3ec714ee80828aeb973b6cc8d02e54f5bf8c62409dc661545884c9f5288884"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.280108 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" event={"ID":"cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb","Type":"ContainerStarted","Data":"ac903cdbec952e53202de6f5b48b578bc19b84de4270b0708de33e0199141fc1"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.281508 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.281583 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.298776 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d99845895-c6s44" event={"ID":"3eb20c0f-b133-4aab-a43a-22dab1ae0630","Type":"ContainerStarted","Data":"4553302e87822f613a9dc8c1350b2d1d400d5ab7f180e893b5e70826c9ff0912"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.319862 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" podStartSLOduration=5.319846801 podStartE2EDuration="5.319846801s" podCreationTimestamp="2025-12-05 19:35:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:59.319322838 +0000 UTC m=+1338.201208833" watchObservedRunningTime="2025-12-05 19:35:59.319846801 +0000 UTC m=+1338.201732796"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.353054 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-d99845895-c6s44" podStartSLOduration=5.14311381 podStartE2EDuration="12.353038687s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="2025-12-05 19:35:49.438080931 +0000 UTC m=+1328.319966926" lastFinishedPulling="2025-12-05 19:35:56.648005808 +0000 UTC m=+1335.529891803" observedRunningTime="2025-12-05 19:35:59.352931074 +0000 UTC m=+1338.234817069" watchObservedRunningTime="2025-12-05 19:35:59.353038687 +0000 UTC m=+1338.234924682"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.353472 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" event={"ID":"f129356d-d3c2-4fc0-856e-2310b4c29996","Type":"ContainerStarted","Data":"b91f9ae29f688c4cbf4a4f5bfa88d1068808eaa57d28f9e97ad354684c540043"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.370899 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerStarted","Data":"92a38e3356104ef13dad2f474994084f14d2e245a48efa7db50a7e39b95e02ec"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.382417 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d7dcb9f9d-rc2ld" podStartSLOduration=5.140138236 podStartE2EDuration="12.382397146s" podCreationTimestamp="2025-12-05 19:35:47 +0000 UTC" firstStartedPulling="2025-12-05 19:35:49.388196936 +0000 UTC m=+1328.270082921" lastFinishedPulling="2025-12-05 19:35:56.630455836 +0000 UTC m=+1335.512341831" observedRunningTime="2025-12-05 19:35:59.378165379 +0000 UTC m=+1338.260051374" watchObservedRunningTime="2025-12-05 19:35:59.382397146 +0000 UTC m=+1338.264283141"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.394515 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api-log" containerID="cri-o://a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c" gracePeriod=30
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.394885 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api" containerID="cri-o://7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4" gracePeriod=30
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.412962 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" path="/var/lib/kubelet/pods/8d03d5f4-36a5-4770-accd-9e04ec44fa75/volumes"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.414877 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0"
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.414905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerStarted","Data":"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.414921 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerStarted","Data":"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.417867 4982 generic.go:334] "Generic (PLEG): container finished" podID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerID="ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c" exitCode=0
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.418057 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="cinder-scheduler" containerID="cri-o://b70c09534543aeee62cb92649d5a8da52f21534cb14caf271890201de4b95e2b" gracePeriod=30
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.418855 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" event={"ID":"ba94fb6b-e425-465d-a56d-227a4f96c75a","Type":"ContainerDied","Data":"ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c"}
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.418909 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="probe" containerID="cri-o://c69795baed5336c3c2d6d4fd1476205685843ffd375ea1ae8a45199921ca823d" gracePeriod=30
Dec 05 19:35:59 crc kubenswrapper[4982]: I1205 19:35:59.434596 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.434580729 podStartE2EDuration="3.434580729s" podCreationTimestamp="2025-12-05 19:35:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:35:59.424164207 +0000 UTC m=+1338.306050212" watchObservedRunningTime="2025-12-05 19:35:59.434580729 +0000 UTC m=+1338.316466714"
Dec 05 19:36:00 crc kubenswrapper[4982]: I1205 19:36:00.446974 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerStarted","Data":"55fe87081814ecf4fdb4b577ae366144b046ca83af874a32d8adb2a98bb37830"}
Dec 05 19:36:00 crc kubenswrapper[4982]: I1205 19:36:00.449341 4982 generic.go:334] "Generic (PLEG): container finished" podID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerID="a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c" exitCode=143
Dec 05 19:36:00 crc kubenswrapper[4982]: I1205 19:36:00.449382 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerDied","Data":"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c"}
Dec 05 19:36:00 crc kubenswrapper[4982]: I1205 19:36:00.452279 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"53a46b29-95f9-43a2-8d2a-770693317314","Type":"ContainerStarted","Data":"1f40ede7002263ffa9ebb54676074eb98fdd47246f7c477164a928c44a48f42f"}
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.488662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aaa8ba1d-a5ea-4c3d-b020-20803817bf62","Type":"ContainerStarted","Data":"1735e27dc4c9f2eb26439032fca806277ed5c1d7f1a3920d2f5afc98931f1e2f"}
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.499932 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" event={"ID":"ba94fb6b-e425-465d-a56d-227a4f96c75a","Type":"ContainerStarted","Data":"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419"}
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.500707 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg"
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.506087 4982 generic.go:334] "Generic (PLEG): container finished" podID="302649c4-5724-464e-a677-269161f1fb69" containerID="c69795baed5336c3c2d6d4fd1476205685843ffd375ea1ae8a45199921ca823d" exitCode=0
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.506855 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerDied","Data":"c69795baed5336c3c2d6d4fd1476205685843ffd375ea1ae8a45199921ca823d"}
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.525405 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.268131881 podStartE2EDuration="6.525387159s" podCreationTimestamp="2025-12-05 19:35:55 +0000 UTC" firstStartedPulling="2025-12-05 19:35:57.545016723 +0000 UTC m=+1336.426902718" lastFinishedPulling="2025-12-05 19:36:00.802272001 +0000 UTC m=+1339.684157996" observedRunningTime="2025-12-05 19:36:01.507876839 +0000 UTC m=+1340.389762834" watchObservedRunningTime="2025-12-05 19:36:01.525387159 +0000 UTC m=+1340.407273154"
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.544039 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.546471 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" podStartSLOduration=5.54645523 podStartE2EDuration="5.54645523s" podCreationTimestamp="2025-12-05 19:35:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:01.523500022 +0000 UTC m=+1340.405386017" watchObservedRunningTime="2025-12-05 19:36:01.54645523 +0000 UTC m=+1340.428341225"
Dec 05 19:36:01 crc kubenswrapper[4982]: I1205 19:36:01.986381 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-76c99c8f46-qgbnz"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.097690 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-76c99c8f46-qgbnz"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.164590 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59b8477896-ww7nl"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.167111 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59b8477896-ww7nl"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.520358 4982 generic.go:334] "Generic (PLEG): container finished" podID="302649c4-5724-464e-a677-269161f1fb69" containerID="b70c09534543aeee62cb92649d5a8da52f21534cb14caf271890201de4b95e2b" exitCode=0
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.520686 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerDied","Data":"b70c09534543aeee62cb92649d5a8da52f21534cb14caf271890201de4b95e2b"}
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.534832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerStarted","Data":"89a8e5a1d1f4fe8126dbbd2a8839e34cfae22edbbeeb0ec535df4e177bb0759a"}
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.536390 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.562143 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"53a46b29-95f9-43a2-8d2a-770693317314","Type":"ContainerStarted","Data":"915395eed85d2c53732dd5410b9cf1a7e2280ce19c2ac9c2887ed299a2227305"}
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.562533 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.583576 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=5.313732935 podStartE2EDuration="11.583548771s" podCreationTimestamp="2025-12-05 19:35:51 +0000 UTC" firstStartedPulling="2025-12-05 19:35:55.361840888 +0000 UTC m=+1334.243726883" lastFinishedPulling="2025-12-05 19:36:01.631656724 +0000 UTC m=+1340.513542719" observedRunningTime="2025-12-05 19:36:02.564801209 +0000 UTC m=+1341.446687214" watchObservedRunningTime="2025-12-05 19:36:02.583548771 +0000 UTC m=+1341.465434766"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.613195 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.613172237 podStartE2EDuration="5.613172237s" podCreationTimestamp="2025-12-05 19:35:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:02.587596113 +0000 UTC m=+1341.469482108" watchObservedRunningTime="2025-12-05 19:36:02.613172237 +0000 UTC m=+1341.495058242"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.714123 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.906641 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907050 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907250 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907313 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907348 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907356 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.907390 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6gsgd\" (UniqueName: \"kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd\") pod \"302649c4-5724-464e-a677-269161f1fb69\" (UID: \"302649c4-5724-464e-a677-269161f1fb69\") "
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.908373 4982 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/302649c4-5724-464e-a677-269161f1fb69-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.917370 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.932883 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts" (OuterVolumeSpecName: "scripts") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:02 crc kubenswrapper[4982]: I1205 19:36:02.943366 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd" (OuterVolumeSpecName: "kube-api-access-6gsgd") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "kube-api-access-6gsgd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.014948 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.014985 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.014997 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6gsgd\" (UniqueName: \"kubernetes.io/projected/302649c4-5724-464e-a677-269161f1fb69-kube-api-access-6gsgd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.077297 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.122474 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.122772 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data" (OuterVolumeSpecName: "config-data") pod "302649c4-5724-464e-a677-269161f1fb69" (UID: "302649c4-5724-464e-a677-269161f1fb69"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.229101 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/302649c4-5724-464e-a677-269161f1fb69-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.569393 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" containerName="cloudkitty-proc" containerID="cri-o://1735e27dc4c9f2eb26439032fca806277ed5c1d7f1a3920d2f5afc98931f1e2f" gracePeriod=30
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.569741 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.570930 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"302649c4-5724-464e-a677-269161f1fb69","Type":"ContainerDied","Data":"013f8bfe1d3e27b65034dc20436edb118687920fba80034384c105a601ab387a"}
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.571037 4982 scope.go:117] "RemoveContainer" containerID="c69795baed5336c3c2d6d4fd1476205685843ffd375ea1ae8a45199921ca823d"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.608664 4982 scope.go:117] "RemoveContainer" containerID="b70c09534543aeee62cb92649d5a8da52f21534cb14caf271890201de4b95e2b"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.616350 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.654515 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.659622 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-768c967fc5-mm4fv"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.694687 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:36:03 crc kubenswrapper[4982]: E1205 19:36:03.695201 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="init"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695222 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="init"
Dec 05 19:36:03 crc kubenswrapper[4982]: E1205 19:36:03.695246 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="cinder-scheduler"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695255 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="cinder-scheduler"
Dec 05 19:36:03 crc kubenswrapper[4982]: E1205 19:36:03.695277 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="dnsmasq-dns"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695284 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="dnsmasq-dns"
Dec 05 19:36:03 crc kubenswrapper[4982]: E1205 19:36:03.695301 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="probe"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695308 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="probe"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695570 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="cinder-scheduler"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695621 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d03d5f4-36a5-4770-accd-9e04ec44fa75" containerName="dnsmasq-dns"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.695642 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="302649c4-5724-464e-a677-269161f1fb69" containerName="probe"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.718542 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.725653 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.752405 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.848988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.849078 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqm96\" (UniqueName: \"kubernetes.io/projected/90a28c6e-ad50-4f47-900b-f35bc06060a3-kube-api-access-cqm96\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.849119 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a28c6e-ad50-4f47-900b-f35bc06060a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.849226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.849282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.849303 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956683 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956786 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956843 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqm96\" (UniqueName: \"kubernetes.io/projected/90a28c6e-ad50-4f47-900b-f35bc06060a3-kube-api-access-cqm96\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956874 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a28c6e-ad50-4f47-900b-f35bc06060a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.956965 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.957249 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a28c6e-ad50-4f47-900b-f35bc06060a3-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.974994 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqm96\" (UniqueName: \"kubernetes.io/projected/90a28c6e-ad50-4f47-900b-f35bc06060a3-kube-api-access-cqm96\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.978369 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.978416 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:03 crc kubenswrapper[4982]: I1205 19:36:03.994591 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-scripts\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:04 crc kubenswrapper[4982]: I1205 19:36:04.010173 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a28c6e-ad50-4f47-900b-f35bc06060a3-config-data\") pod \"cinder-scheduler-0\" (UID: \"90a28c6e-ad50-4f47-900b-f35bc06060a3\") " pod="openstack/cinder-scheduler-0"
Dec 05 19:36:04 crc kubenswrapper[4982]: I1205 19:36:04.079633 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 19:36:04 crc kubenswrapper[4982]: I1205 19:36:04.811892 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 19:36:05 crc kubenswrapper[4982]: I1205 19:36:05.414220 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="302649c4-5724-464e-a677-269161f1fb69" path="/var/lib/kubelet/pods/302649c4-5724-464e-a677-269161f1fb69/volumes"
Dec 05 19:36:05 crc kubenswrapper[4982]: I1205 19:36:05.647908 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"90a28c6e-ad50-4f47-900b-f35bc06060a3","Type":"ContainerStarted","Data":"4b96fe166ba9ab92ec34d6b2a8fc0d5f78f3fd4adea9add7acca4842218ccb53"}
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.643289 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.658962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"90a28c6e-ad50-4f47-900b-f35bc06060a3","Type":"ContainerStarted","Data":"1ecf7c310ec7cb066e02c922387397868903a251374458b6c0a83696d4e7283e"}
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.659013 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"90a28c6e-ad50-4f47-900b-f35bc06060a3","Type":"ContainerStarted","Data":"02c3c6d70c9c8c9df56514108fe7f57d03f359aef28456c4f90a69ddd195af08"}
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.691727 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.692995 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.695664 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-ggg8d"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.699475 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.699459359 podStartE2EDuration="3.699459359s" podCreationTimestamp="2025-12-05 19:36:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:06.69590751 +0000 UTC m=+1345.577793505" watchObservedRunningTime="2025-12-05 19:36:06.699459359 +0000 UTC m=+1345.581345354"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.707258 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.707831 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.753017 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.795218 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"]
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.795486 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerName="dnsmasq-dns" containerID="cri-o://c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808" gracePeriod=10
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.852899 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdwvv\" (UniqueName: \"kubernetes.io/projected/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-kube-api-access-cdwvv\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.853142 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.853220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config-secret\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.853282 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.955346 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.955756 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdwvv\" (UniqueName: \"kubernetes.io/projected/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-kube-api-access-cdwvv\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.955931 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.955984 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config-secret\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.956823 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.969954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-openstack-config-secret\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.971892 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-combined-ca-bundle\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:06 crc kubenswrapper[4982]: I1205 19:36:06.990713 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdwvv\" (UniqueName: \"kubernetes.io/projected/0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2-kube-api-access-cdwvv\") pod \"openstackclient\" (UID: \"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2\") " pod="openstack/openstackclient"
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.035277 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient"
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.510396 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx"
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576080 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cb2z2\" (UniqueName: \"kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576179 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576233 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576285 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576337 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.576443 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config\") pod \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\" (UID: \"dd328ed6-3c75-4e39-9f77-3ce9629c8421\") "
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.611722 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2" (OuterVolumeSpecName: "kube-api-access-cb2z2") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "kube-api-access-cb2z2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.616390 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"]
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.671497 4982 generic.go:334] "Generic (PLEG): container finished" podID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerID="c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808" exitCode=0
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.671846 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" event={"ID":"dd328ed6-3c75-4e39-9f77-3ce9629c8421","Type":"ContainerDied","Data":"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808"}
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.671873 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx" event={"ID":"dd328ed6-3c75-4e39-9f77-3ce9629c8421","Type":"ContainerDied","Data":"40cbb17511795a4cce15dad15e062cfa127fae20e8d1146df70c0f3aa5016994"}
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.671889 4982 scope.go:117] "RemoveContainer" containerID="c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808"
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.672009 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-6mnfx"
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.675615 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2","Type":"ContainerStarted","Data":"0c6d300b610f794457e3e5cea5c452870f5fe36e2cb935cfef1708518f3556b5"}
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.688489 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cb2z2\" (UniqueName: \"kubernetes.io/projected/dd328ed6-3c75-4e39-9f77-3ce9629c8421-kube-api-access-cb2z2\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.698253 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.698302 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.700424 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.705184 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config" (OuterVolumeSpecName: "config") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.718863 4982 scope.go:117] "RemoveContainer" containerID="57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.725831 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "dd328ed6-3c75-4e39-9f77-3ce9629c8421" (UID: "dd328ed6-3c75-4e39-9f77-3ce9629c8421"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.743754 4982 scope.go:117] "RemoveContainer" containerID="c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808" Dec 05 19:36:07 crc kubenswrapper[4982]: E1205 19:36:07.746276 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808\": container with ID starting with c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808 not found: ID does not exist" containerID="c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.746316 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808"} err="failed to get container status \"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808\": rpc error: code = NotFound desc = could not find container \"c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808\": container with ID starting with c1f9ca8ca8112cdc1acac81990400cc990dca0c328faa95bc468b687b571a808 not found: ID does not exist" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.746343 4982 scope.go:117] "RemoveContainer" containerID="57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2" Dec 05 19:36:07 crc kubenswrapper[4982]: E1205 19:36:07.746701 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2\": container with ID starting with 57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2 not found: ID does not exist" containerID="57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.746731 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2"} err="failed to get container status \"57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2\": rpc error: code = NotFound desc = could not find container \"57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2\": container with ID starting with 57c38d6b52727a9a1d1b26204a8f134b2bcf46315edc9187daefc82a524d88e2 not found: ID 
does not exist" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.790813 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.790848 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.790875 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.790885 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:07 crc kubenswrapper[4982]: I1205 19:36:07.790893 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dd328ed6-3c75-4e39-9f77-3ce9629c8421-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:08 crc kubenswrapper[4982]: I1205 19:36:08.022673 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"] Dec 05 19:36:08 crc kubenswrapper[4982]: I1205 19:36:08.028899 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-6mnfx"] Dec 05 19:36:08 crc kubenswrapper[4982]: I1205 19:36:08.693778 4982 generic.go:334] "Generic (PLEG): container finished" podID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" containerID="1735e27dc4c9f2eb26439032fca806277ed5c1d7f1a3920d2f5afc98931f1e2f" exitCode=0 Dec 05 19:36:08 crc kubenswrapper[4982]: I1205 19:36:08.694212 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aaa8ba1d-a5ea-4c3d-b020-20803817bf62","Type":"ContainerDied","Data":"1735e27dc4c9f2eb26439032fca806277ed5c1d7f1a3920d2f5afc98931f1e2f"} Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.083248 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.095674 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218008 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218249 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24w29\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218274 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218326 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218351 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.218405 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle\") pod \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\" (UID: \"aaa8ba1d-a5ea-4c3d-b020-20803817bf62\") " Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.238077 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts" (OuterVolumeSpecName: "scripts") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.243892 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.246413 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29" (OuterVolumeSpecName: "kube-api-access-24w29") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "kube-api-access-24w29". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.272124 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs" (OuterVolumeSpecName: "certs") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.297300 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.307302 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data" (OuterVolumeSpecName: "config-data") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.315223 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aaa8ba1d-a5ea-4c3d-b020-20803817bf62" (UID: "aaa8ba1d-a5ea-4c3d-b020-20803817bf62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326270 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326295 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24w29\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-kube-api-access-24w29\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326305 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326313 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326321 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.326330 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaa8ba1d-a5ea-4c3d-b020-20803817bf62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.403928 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" path="/var/lib/kubelet/pods/dd328ed6-3c75-4e39-9f77-3ce9629c8421/volumes" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.569807 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6fbc9dfdf4-kq8zv" Dec 
05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.645663 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.645883 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-76c99c8f46-qgbnz" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api-log" containerID="cri-o://f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374" gracePeriod=30 Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.646741 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-76c99c8f46-qgbnz" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api" containerID="cri-o://d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd" gracePeriod=30 Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.718310 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.718403 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"aaa8ba1d-a5ea-4c3d-b020-20803817bf62","Type":"ContainerDied","Data":"c406118f061f8ee6e2f6e9901adc9d5869f9714791794fcc1c0f7769d1c7084c"} Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.718441 4982 scope.go:117] "RemoveContainer" containerID="1735e27dc4c9f2eb26439032fca806277ed5c1d7f1a3920d2f5afc98931f1e2f" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.750317 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.771687 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.797736 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:36:09 crc kubenswrapper[4982]: E1205 19:36:09.798200 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerName="init" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.798215 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerName="init" Dec 05 19:36:09 crc kubenswrapper[4982]: E1205 19:36:09.798230 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerName="dnsmasq-dns" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.798236 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" containerName="dnsmasq-dns" Dec 05 19:36:09 crc kubenswrapper[4982]: E1205 19:36:09.798252 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" containerName="cloudkitty-proc" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.798258 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" containerName="cloudkitty-proc" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.798449 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" containerName="cloudkitty-proc" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.798477 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd328ed6-3c75-4e39-9f77-3ce9629c8421" 
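
The cpu_manager/state_mem/memory_manager entries above record the kubelet pruning per-container resource-manager state for pods that no longer exist (dnsmasq-dns and the old cloudkitty-proc) before admitting the replacement pod. A short illustrative sketch of such a stale-state sweep over a keyed state map; the types and values are made up for illustration and this is not the kubelet's cpumanager implementation:

    package main

    import "fmt"

    type key struct{ podUID, container string }

    // staleSweep drops state entries whose pod is no longer in the active set,
    // the same bookkeeping the RemoveStaleState lines above report.
    func staleSweep(state map[key]string, active map[string]bool) {
        for k := range state {
            if !active[k.podUID] {
                fmt.Printf("RemoveStaleState: removing container %q of pod %q\n", k.container, k.podUID)
                delete(state, k)
            }
        }
    }

    func main() {
        state := map[key]string{
            {"dd328ed6", "dnsmasq-dns"}:     "cpuset=0-3", // stale: pod removed
            {"c5a3369c", "cloudkitty-proc"}: "cpuset=0-3", // live replacement pod
        }
        staleSweep(state, map[string]bool{"c5a3369c": true})
        fmt.Println(len(state)) // 1: only the live pod's entry survives
    }
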
containerName="dnsmasq-dns" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.799179 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.802710 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.818507 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.852950 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.853220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.853265 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfxc9\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.853370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.853533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.853603 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955454 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955503 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfxc9\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " 
pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955571 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955621 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.955704 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.965845 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.965888 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.969957 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.970547 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.970634 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:09 crc kubenswrapper[4982]: I1205 19:36:09.982709 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfxc9\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9\") pod \"cloudkitty-proc-0\" (UID: 
\"c5a3369c-f919-4454-8675-60b641794708\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:36:10 crc kubenswrapper[4982]: I1205 19:36:10.165083 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:36:10 crc kubenswrapper[4982]: I1205 19:36:10.552042 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 19:36:10 crc kubenswrapper[4982]: I1205 19:36:10.688415 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:36:10 crc kubenswrapper[4982]: I1205 19:36:10.789943 4982 generic.go:334] "Generic (PLEG): container finished" podID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerID="f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374" exitCode=143 Dec 05 19:36:10 crc kubenswrapper[4982]: I1205 19:36:10.790044 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerDied","Data":"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374"} Dec 05 19:36:11 crc kubenswrapper[4982]: I1205 19:36:11.405372 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaa8ba1d-a5ea-4c3d-b020-20803817bf62" path="/var/lib/kubelet/pods/aaa8ba1d-a5ea-4c3d-b020-20803817bf62/volumes" Dec 05 19:36:11 crc kubenswrapper[4982]: I1205 19:36:11.819917 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"c5a3369c-f919-4454-8675-60b641794708","Type":"ContainerStarted","Data":"0f764eeed87c84b9be9aaf8bd10ccd088023ac9a79b833a4fe1ce9c4cf1eb54b"} Dec 05 19:36:11 crc kubenswrapper[4982]: I1205 19:36:11.819959 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"c5a3369c-f919-4454-8675-60b641794708","Type":"ContainerStarted","Data":"93d1f6fdf4673538fdb395d2304d4886ba698fdb09c65232fc63926d7cf330d5"} Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.638330 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.659043 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=4.659022125 podStartE2EDuration="4.659022125s" podCreationTimestamp="2025-12-05 19:36:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:11.843334198 +0000 UTC m=+1350.725220193" watchObservedRunningTime="2025-12-05 19:36:13.659022125 +0000 UTC m=+1352.540908150" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.768515 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle\") pod \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.768579 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data\") pod \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.768673 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnsws\" (UniqueName: \"kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws\") pod \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.768696 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs\") pod \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.768743 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom\") pod \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\" (UID: \"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e\") " Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.770123 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs" (OuterVolumeSpecName: "logs") pod "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" (UID: "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.774519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" (UID: "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.775610 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws" (OuterVolumeSpecName: "kube-api-access-xnsws") pod "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" (UID: "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e"). InnerVolumeSpecName "kube-api-access-xnsws". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.826930 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data" (OuterVolumeSpecName: "config-data") pod "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" (UID: "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.839879 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" (UID: "7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.845184 4982 generic.go:334] "Generic (PLEG): container finished" podID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerID="d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd" exitCode=0 Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.845228 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerDied","Data":"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd"} Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.845257 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-76c99c8f46-qgbnz" event={"ID":"7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e","Type":"ContainerDied","Data":"4020997986cd07096a7c9013a3d6089c379be53d37c35927d6a9c2f8ff34c890"} Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.845275 4982 scope.go:117] "RemoveContainer" containerID="d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.845397 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-76c99c8f46-qgbnz" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.874127 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.874223 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.874241 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnsws\" (UniqueName: \"kubernetes.io/projected/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-kube-api-access-xnsws\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.874254 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.874265 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.905415 4982 scope.go:117] "RemoveContainer" containerID="f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.926816 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.930979 4982 scope.go:117] "RemoveContainer" containerID="d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd" Dec 05 19:36:13 crc kubenswrapper[4982]: E1205 19:36:13.932023 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd\": container with ID starting with d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd not found: ID does not exist" containerID="d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.932059 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd"} err="failed to get container status \"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd\": rpc error: code = NotFound desc = could not find container \"d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd\": container with ID starting with d5c3c8028be7e94b62d2a0c807d55451d7ed86fc4ff9fd18e5afee3f012078dd not found: ID does not exist" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.932080 4982 scope.go:117] "RemoveContainer" containerID="f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374" Dec 05 19:36:13 crc kubenswrapper[4982]: E1205 19:36:13.932322 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374\": container with ID starting with f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374 not found: ID does not exist" 
containerID="f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.932346 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374"} err="failed to get container status \"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374\": rpc error: code = NotFound desc = could not find container \"f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374\": container with ID starting with f4a9833dd7252abf9710d78da0b4b3d1263ff9a73f5f6edc6945e8800c978374 not found: ID does not exist" Dec 05 19:36:13 crc kubenswrapper[4982]: I1205 19:36:13.936074 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-76c99c8f46-qgbnz"] Dec 05 19:36:14 crc kubenswrapper[4982]: I1205 19:36:14.316859 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.127637 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-fc787d555-k2pps"] Dec 05 19:36:15 crc kubenswrapper[4982]: E1205 19:36:15.128260 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api-log" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.128273 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api-log" Dec 05 19:36:15 crc kubenswrapper[4982]: E1205 19:36:15.128288 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.128294 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.128463 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.128494 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" containerName="barbican-api-log" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.129517 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.133023 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.133059 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.133025 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.150586 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-fc787d555-k2pps"] Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197443 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-log-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197716 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-etc-swift\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197828 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-public-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197865 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-combined-ca-bundle\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197890 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-config-data\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.197933 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-run-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.198040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvv24\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-kube-api-access-jvv24\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " 
pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.198255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-internal-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.299933 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-public-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.299987 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-combined-ca-bundle\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300011 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-config-data\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300040 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-run-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvv24\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-kube-api-access-jvv24\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300371 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-internal-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300450 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-log-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.300579 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-etc-swift\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 
crc kubenswrapper[4982]: I1205 19:36:15.301121 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-log-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.301398 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4ae7179d-f311-4080-9409-b5315377edea-run-httpd\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.305219 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-config-data\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.305858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-combined-ca-bundle\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.317970 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-public-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.318007 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4ae7179d-f311-4080-9409-b5315377edea-internal-tls-certs\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.318571 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-etc-swift\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.324062 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvv24\" (UniqueName: \"kubernetes.io/projected/4ae7179d-f311-4080-9409-b5315377edea-kube-api-access-jvv24\") pod \"swift-proxy-fc787d555-k2pps\" (UID: \"4ae7179d-f311-4080-9409-b5315377edea\") " pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.409132 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e" path="/var/lib/kubelet/pods/7e6b4b5b-5a7d-4661-a9b2-4fa91135de2e/volumes" Dec 05 19:36:15 crc kubenswrapper[4982]: I1205 19:36:15.446650 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.589305 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.591176 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-central-agent" containerID="cri-o://06bff5795068f07446af0e61bdb956ec7862b380c928c2134e0fe34e61e49e5f" gracePeriod=30 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.591288 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-notification-agent" containerID="cri-o://92a38e3356104ef13dad2f474994084f14d2e245a48efa7db50a7e39b95e02ec" gracePeriod=30 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.591270 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="sg-core" containerID="cri-o://55fe87081814ecf4fdb4b577ae366144b046ca83af874a32d8adb2a98bb37830" gracePeriod=30 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.591299 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="proxy-httpd" containerID="cri-o://89a8e5a1d1f4fe8126dbbd2a8839e34cfae22edbbeeb0ec535df4e177bb0759a" gracePeriod=30 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.601549 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.182:3000/\": EOF" Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.931885 4982 generic.go:334] "Generic (PLEG): container finished" podID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerID="89a8e5a1d1f4fe8126dbbd2a8839e34cfae22edbbeeb0ec535df4e177bb0759a" exitCode=0 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.931943 4982 generic.go:334] "Generic (PLEG): container finished" podID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerID="55fe87081814ecf4fdb4b577ae366144b046ca83af874a32d8adb2a98bb37830" exitCode=2 Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.931939 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerDied","Data":"89a8e5a1d1f4fe8126dbbd2a8839e34cfae22edbbeeb0ec535df4e177bb0759a"} Dec 05 19:36:17 crc kubenswrapper[4982]: I1205 19:36:17.931984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerDied","Data":"55fe87081814ecf4fdb4b577ae366144b046ca83af874a32d8adb2a98bb37830"} Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.282650 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.282928 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-log" containerID="cri-o://e1ffacc643eb622b9190264e0d7bc7dbfa1d66d2e6532aed2f5fc811b3a0027b" gracePeriod=30 Dec 05 19:36:18 
crc kubenswrapper[4982]: I1205 19:36:18.283045 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-httpd" containerID="cri-o://73cc935b8f684877c72bc1969c15c1c851a56db75a8b48d1fdd2a567d9334e1b" gracePeriod=30 Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.943631 4982 generic.go:334] "Generic (PLEG): container finished" podID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerID="e1ffacc643eb622b9190264e0d7bc7dbfa1d66d2e6532aed2f5fc811b3a0027b" exitCode=143 Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.943798 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerDied","Data":"e1ffacc643eb622b9190264e0d7bc7dbfa1d66d2e6532aed2f5fc811b3a0027b"} Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.975770 4982 generic.go:334] "Generic (PLEG): container finished" podID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerID="06bff5795068f07446af0e61bdb956ec7862b380c928c2134e0fe34e61e49e5f" exitCode=0 Dec 05 19:36:18 crc kubenswrapper[4982]: I1205 19:36:18.975880 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerDied","Data":"06bff5795068f07446af0e61bdb956ec7862b380c928c2134e0fe34e61e49e5f"} Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.462099 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.462710 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-log" containerID="cri-o://2738a0bbfda3397a5d061f2c64da95ca95b1573dd45749e895f1c1ae38faa248" gracePeriod=30 Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.462803 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-httpd" containerID="cri-o://8d88d572844c2446dd092f139ccfa43054b9e61b1e06ce3394126ebd6a3b5391" gracePeriod=30 Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.996912 4982 generic.go:334] "Generic (PLEG): container finished" podID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerID="2738a0bbfda3397a5d061f2c64da95ca95b1573dd45749e895f1c1ae38faa248" exitCode=143 Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.996973 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerDied","Data":"2738a0bbfda3397a5d061f2c64da95ca95b1573dd45749e895f1c1ae38faa248"} Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.999247 4982 generic.go:334] "Generic (PLEG): container finished" podID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerID="92a38e3356104ef13dad2f474994084f14d2e245a48efa7db50a7e39b95e02ec" exitCode=0 Dec 05 19:36:20 crc kubenswrapper[4982]: I1205 19:36:20.999270 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerDied","Data":"92a38e3356104ef13dad2f474994084f14d2e245a48efa7db50a7e39b95e02ec"} Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.028976 4982 
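
The graceful shutdowns above produce three distinct exit codes: exitCode=0 for containers that stop cleanly on their own, exitCode=2 for an application-level error (sg-core), and exitCode=143 for containers terminated by the signal sent during the 30s grace period, by the usual 128+n convention (143 = 128 + 15, SIGTERM). A tiny decoder sketch for that convention; illustrative only:

    package main

    import "fmt"

    // signalFromExit decodes the 128+n exit-code convention, e.g. the
    // exitCode=143 entries above indicate termination by SIGTERM (128+15).
    func signalFromExit(code int) (sig int, bySignal bool) {
        if code > 128 && code < 160 {
            return code - 128, true
        }
        return 0, false
    }

    func main() {
        for _, c := range []int{0, 2, 143} {
            if sig, ok := signalFromExit(c); ok {
                fmt.Printf("exitCode=%d -> terminated by signal %d\n", c, sig)
            } else {
                fmt.Printf("exitCode=%d -> process exited on its own\n", c)
            }
        }
    }
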
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.029415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerDied","Data":"73cc935b8f684877c72bc1969c15c1c851a56db75a8b48d1fdd2a567d9334e1b"}
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.220133 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.238911 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.238989 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.239036 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.239076 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.239174 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.239272 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzbpj\" (UniqueName: \"kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.239359 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd\") pod \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\" (UID: \"bf69e00a-7f8e-460a-ab56-7daaceeeef26\") "
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.240355 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.255740 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts" (OuterVolumeSpecName: "scripts") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.255781 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj" (OuterVolumeSpecName: "kube-api-access-fzbpj") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "kube-api-access-fzbpj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.257941 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.320519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.327117 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.329988 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341641 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341674 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341688 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzbpj\" (UniqueName: \"kubernetes.io/projected/bf69e00a-7f8e-460a-ab56-7daaceeeef26-kube-api-access-fzbpj\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341700 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/bf69e00a-7f8e-460a-ab56-7daaceeeef26-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341710 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.341720 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.405782 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data" (OuterVolumeSpecName: "config-data") pod "bf69e00a-7f8e-460a-ab56-7daaceeeef26" (UID: "bf69e00a-7f8e-460a-ab56-7daaceeeef26"). InnerVolumeSpecName "config-data".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450300 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtzvb\" (UniqueName: \"kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450382 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450409 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450620 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450654 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450745 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450766 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450789 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs\") pod \"b9aa8afe-080d-49c5-b812-dba7db048ed9\" (UID: \"b9aa8afe-080d-49c5-b812-dba7db048ed9\") " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.450736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.451378 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs" (OuterVolumeSpecName: "logs") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.451834 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.451855 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b9aa8afe-080d-49c5-b812-dba7db048ed9-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.451865 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf69e00a-7f8e-460a-ab56-7daaceeeef26-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.458308 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts" (OuterVolumeSpecName: "scripts") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.461573 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb" (OuterVolumeSpecName: "kube-api-access-jtzvb") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "kube-api-access-jtzvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.480827 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f" (OuterVolumeSpecName: "glance") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.503590 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.520382 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data" (OuterVolumeSpecName: "config-data") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.541566 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-fc787d555-k2pps"] Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.542860 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b9aa8afe-080d-49c5-b812-dba7db048ed9" (UID: "b9aa8afe-080d-49c5-b812-dba7db048ed9"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554645 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") on node \"crc\" " Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554686 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554704 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554715 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554727 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtzvb\" (UniqueName: \"kubernetes.io/projected/b9aa8afe-080d-49c5-b812-dba7db048ed9-kube-api-access-jtzvb\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.554739 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9aa8afe-080d-49c5-b812-dba7db048ed9-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.594304 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.594464 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f") on node "crc" Dec 05 19:36:22 crc kubenswrapper[4982]: I1205 19:36:22.656692 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.043190 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b9aa8afe-080d-49c5-b812-dba7db048ed9","Type":"ContainerDied","Data":"7cb6aceb419311c7cb9a71d859d9d79fea90712a9cce02f8d85665766f162b6e"} Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.043534 4982 scope.go:117] "RemoveContainer" containerID="73cc935b8f684877c72bc1969c15c1c851a56db75a8b48d1fdd2a567d9334e1b" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.043469 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.046765 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-fc787d555-k2pps" event={"ID":"4ae7179d-f311-4080-9409-b5315377edea","Type":"ContainerStarted","Data":"286b2a02e1e6190d22669c2fbe5c48550e4c7819144ec8725b40f494744d5916"} Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.046807 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-fc787d555-k2pps" event={"ID":"4ae7179d-f311-4080-9409-b5315377edea","Type":"ContainerStarted","Data":"476eb1a5c5d27e4d1e40f85d8a381875f979317877801c02ad969e9fabf4cc58"} Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.052661 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"bf69e00a-7f8e-460a-ab56-7daaceeeef26","Type":"ContainerDied","Data":"18e0393d4d7380b37988b87f8b673f976fd212f40447dbfff6169df8038d70d3"} Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.052748 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.059625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2","Type":"ContainerStarted","Data":"340b6e37f294e5baea5656590ed7ab4774312a07294524fe0e299990ac6636ef"} Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.082763 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.921742958 podStartE2EDuration="17.082742817s" podCreationTimestamp="2025-12-05 19:36:06 +0000 UTC" firstStartedPulling="2025-12-05 19:36:07.622570182 +0000 UTC m=+1346.504456177" lastFinishedPulling="2025-12-05 19:36:21.783570041 +0000 UTC m=+1360.665456036" observedRunningTime="2025-12-05 19:36:23.077952587 +0000 UTC m=+1361.959838592" watchObservedRunningTime="2025-12-05 19:36:23.082742817 +0000 UTC m=+1361.964628812" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.137921 4982 scope.go:117] "RemoveContainer" containerID="e1ffacc643eb622b9190264e0d7bc7dbfa1d66d2e6532aed2f5fc811b3a0027b" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.170977 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.180544 4982 scope.go:117] "RemoveContainer" containerID="89a8e5a1d1f4fe8126dbbd2a8839e34cfae22edbbeeb0ec535df4e177bb0759a" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.187212 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.195273 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.210592 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.210810 4982 scope.go:117] "RemoveContainer" containerID="55fe87081814ecf4fdb4b577ae366144b046ca83af874a32d8adb2a98bb37830" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.227562 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.228677 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-central-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.228847 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-central-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.228925 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="sg-core" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.229042 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="sg-core" Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.229169 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.229242 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.229401 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-log" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.229774 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-log" Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.229860 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="proxy-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.229932 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="proxy-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: E1205 19:36:23.230022 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-notification-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230103 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-notification-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230495 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-log" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230638 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="sg-core" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230725 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="proxy-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230799 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" containerName="glance-httpd" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230866 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-notification-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.230947 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" containerName="ceilometer-central-agent" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.232532 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.235980 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.238231 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.242603 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.242871 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.244044 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.244226 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.251697 4982 scope.go:117] "RemoveContainer" containerID="92a38e3356104ef13dad2f474994084f14d2e245a48efa7db50a7e39b95e02ec" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271189 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-scripts\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271226 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271267 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271292 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271326 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271344 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271365 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzf4z\" (UniqueName: \"kubernetes.io/projected/de9f5462-cedf-4860-9b59-bac07091738f-kube-api-access-rzf4z\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271406 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271618 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-config-data\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271663 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271685 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271708 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271804 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271855 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-logs\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.271910 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.287610 4982 scope.go:117] "RemoveContainer" containerID="06bff5795068f07446af0e61bdb956ec7862b380c928c2134e0fe34e61e49e5f" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.329319 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.337241 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-scripts\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373554 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373579 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373609 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373651 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzf4z\" (UniqueName: \"kubernetes.io/projected/de9f5462-cedf-4860-9b59-bac07091738f-kube-api-access-rzf4z\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373718 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-config-data\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-public-tls-certs\") pod 
\"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373781 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373796 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373813 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373834 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-logs\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.373870 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.376305 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.376402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.376562 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.376908 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de9f5462-cedf-4860-9b59-bac07091738f-logs\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.378024 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.378087 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.378205 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-scripts\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.379909 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.380031 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.380129 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.380179 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/adcd8f961212518a09eddf140065c73138e877cdf387d14b4b72fd8f3cd3396b/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.380387 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.381354 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-config-data\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.381705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/de9f5462-cedf-4860-9b59-bac07091738f-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.393860 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72\") pod \"ceilometer-0\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") " pod="openstack/ceilometer-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.395133 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzf4z\" (UniqueName: \"kubernetes.io/projected/de9f5462-cedf-4860-9b59-bac07091738f-kube-api-access-rzf4z\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.407961 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b9aa8afe-080d-49c5-b812-dba7db048ed9" path="/var/lib/kubelet/pods/b9aa8afe-080d-49c5-b812-dba7db048ed9/volumes" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.410408 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf69e00a-7f8e-460a-ab56-7daaceeeef26" path="/var/lib/kubelet/pods/bf69e00a-7f8e-460a-ab56-7daaceeeef26/volumes" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.432871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9588d0cd-f65f-4d00-aad6-b0639d71562f\") pod \"glance-default-external-api-0\" (UID: \"de9f5462-cedf-4860-9b59-bac07091738f\") " pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.593863 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 19:36:23 crc kubenswrapper[4982]: I1205 19:36:23.628498 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.087529 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-fc787d555-k2pps" event={"ID":"4ae7179d-f311-4080-9409-b5315377edea","Type":"ContainerStarted","Data":"a38e913d9b9c290c46d9f2be3c5ece4fe793e95037ff0883d14712d87a50e7a3"} Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.088766 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.088856 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-fc787d555-k2pps" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.093217 4982 generic.go:334] "Generic (PLEG): container finished" podID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerID="8d88d572844c2446dd092f139ccfa43054b9e61b1e06ce3394126ebd6a3b5391" exitCode=0 Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.093272 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerDied","Data":"8d88d572844c2446dd092f139ccfa43054b9e61b1e06ce3394126ebd6a3b5391"} Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.114042 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-fc787d555-k2pps" podStartSLOduration=9.114026199 podStartE2EDuration="9.114026199s" podCreationTimestamp="2025-12-05 19:36:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:24.113067524 +0000 UTC m=+1362.994953519" watchObservedRunningTime="2025-12-05 19:36:24.114026199 +0000 UTC m=+1362.995912194" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.235335 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.244361 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:24 crc kubenswrapper[4982]: W1205 19:36:24.244400 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde9f5462_cedf_4860_9b59_bac07091738f.slice/crio-6672b4bc43c07c9f05b39f4f04a481dbd655724ddb3a18a085e6e99a68ad06c9 WatchSource:0}: Error finding container 6672b4bc43c07c9f05b39f4f04a481dbd655724ddb3a18a085e6e99a68ad06c9: Status 404 returned error can't find the container with id 6672b4bc43c07c9f05b39f4f04a481dbd655724ddb3a18a085e6e99a68ad06c9 Dec 05 19:36:24 crc kubenswrapper[4982]: W1205 19:36:24.248627 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod521ad732_0bb0_4030_8096_b38717ff158b.slice/crio-6d56a6dcdf7906533ee963aff3834975d76d541489ea5736fa9f3deea4fb927a WatchSource:0}: Error finding container 6d56a6dcdf7906533ee963aff3834975d76d541489ea5736fa9f3deea4fb927a: Status 404 returned error can't find the container with id 6d56a6dcdf7906533ee963aff3834975d76d541489ea5736fa9f3deea4fb927a Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.407548 4982 util.go:48] "No ready sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605117 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605202 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605251 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605356 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605421 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605492 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzqxt\" (UniqueName: \"kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605811 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.605872 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs" (OuterVolumeSpecName: "logs") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.606694 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\" (UID: \"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039\") " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.607361 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.607437 4982 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.616074 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt" (OuterVolumeSpecName: "kube-api-access-mzqxt") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "kube-api-access-mzqxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.623249 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts" (OuterVolumeSpecName: "scripts") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.670274 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.698139 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (OuterVolumeSpecName: "glance") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.709611 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzqxt\" (UniqueName: \"kubernetes.io/projected/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-kube-api-access-mzqxt\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.709660 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" " Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.709672 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.709697 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.733328 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.764319 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data" (OuterVolumeSpecName: "config-data") pod "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" (UID: "4e81bbea-ba1a-47f3-8b77-eaf44e3e0039"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.781501 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.781666 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-9fbefc77-3371-4465-80eb-112fbd1c349b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b") on node "crc" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.812979 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.813022 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:24 crc kubenswrapper[4982]: I1205 19:36:24.813032 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.035483 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.106210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"4e81bbea-ba1a-47f3-8b77-eaf44e3e0039","Type":"ContainerDied","Data":"a8ef43f7877ed09361aa2eaa6154ce47d1d5411c0de29ec05e785edd13623197"} Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.106225 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.107545 4982 scope.go:117] "RemoveContainer" containerID="8d88d572844c2446dd092f139ccfa43054b9e61b1e06ce3394126ebd6a3b5391" Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.114205 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerStarted","Data":"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"} Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.114259 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerStarted","Data":"6d56a6dcdf7906533ee963aff3834975d76d541489ea5736fa9f3deea4fb927a"} Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.117943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de9f5462-cedf-4860-9b59-bac07091738f","Type":"ContainerStarted","Data":"6672b4bc43c07c9f05b39f4f04a481dbd655724ddb3a18a085e6e99a68ad06c9"} Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.155206 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.159342 4982 scope.go:117] "RemoveContainer" containerID="2738a0bbfda3397a5d061f2c64da95ca95b1573dd45749e895f1c1ae38faa248" Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.174074 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.221225 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] 
Dec 05 19:36:25 crc kubenswrapper[4982]: E1205 19:36:25.221852 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-log"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.221913 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-log"
Dec 05 19:36:25 crc kubenswrapper[4982]: E1205 19:36:25.221966 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-httpd"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.222023 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-httpd"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.222309 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-log"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.222380 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" containerName="glance-httpd"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.223637 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.226748 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.226951 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.233793 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.323794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324093 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-logs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324241 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324459 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-625sx\" (UniqueName: \"kubernetes.io/projected/36af0f53-11a4-47a8-9361-acde52280271-kube-api-access-625sx\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324673 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.324748 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.406989 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4e81bbea-ba1a-47f3-8b77-eaf44e3e0039" path="/var/lib/kubelet/pods/4e81bbea-ba1a-47f3-8b77-eaf44e3e0039/volumes"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-625sx\" (UniqueName: \"kubernetes.io/projected/36af0f53-11a4-47a8-9361-acde52280271-kube-api-access-625sx\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427666 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427697 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427776 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427809 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427829 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-logs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.427883 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.431009 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.431036 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/31401ebfd1660da8e240395dcb349960c63d08b34963a13b7abf991967d4dead/globalmount\"" pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.431493 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.431740 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/36af0f53-11a4-47a8-9361-acde52280271-logs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.434998 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-scripts\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.436245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.436355 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.436649 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/36af0f53-11a4-47a8-9361-acde52280271-config-data\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.447859 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-625sx\" (UniqueName: \"kubernetes.io/projected/36af0f53-11a4-47a8-9361-acde52280271-kube-api-access-625sx\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.491255 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-9fbefc77-3371-4465-80eb-112fbd1c349b\") pod \"glance-default-internal-api-0\" (UID: \"36af0f53-11a4-47a8-9361-acde52280271\") " pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:25 crc kubenswrapper[4982]: I1205 19:36:25.549027 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 19:36:26 crc kubenswrapper[4982]: I1205 19:36:26.163339 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerStarted","Data":"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"}
Dec 05 19:36:26 crc kubenswrapper[4982]: I1205 19:36:26.167554 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de9f5462-cedf-4860-9b59-bac07091738f","Type":"ContainerStarted","Data":"7273ea374f7e3aeb9ee50fd4a926c87333c6eb426907a4e8a4f38da490514d49"}
Dec 05 19:36:26 crc kubenswrapper[4982]: I1205 19:36:26.167962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"de9f5462-cedf-4860-9b59-bac07091738f","Type":"ContainerStarted","Data":"2f7cbe6c2b77eaf526d68f8430725aa4a005079fa7b5996e06f25bb755edb34b"}
Dec 05 19:36:26 crc kubenswrapper[4982]: I1205 19:36:26.207385 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.207366983 podStartE2EDuration="3.207366983s" podCreationTimestamp="2025-12-05 19:36:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:26.193289316 +0000 UTC m=+1365.075175311" watchObservedRunningTime="2025-12-05 19:36:26.207366983 +0000 UTC m=+1365.089252978"
Dec 05 19:36:26 crc kubenswrapper[4982]: I1205 19:36:26.296898 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 19:36:27 crc kubenswrapper[4982]: I1205 19:36:27.189848 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36af0f53-11a4-47a8-9361-acde52280271","Type":"ContainerStarted","Data":"b99986b44e7930c220b537310940168861983cd003d7310999f31e69cd5d61c7"}
Dec 05 19:36:27 crc kubenswrapper[4982]: I1205 19:36:27.190524 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36af0f53-11a4-47a8-9361-acde52280271","Type":"ContainerStarted","Data":"3de037ec9112ed96761be30eb1d7db1c75feb2b17a1502488e57176489757e5d"}
Dec 05 19:36:27 crc kubenswrapper[4982]: I1205 19:36:27.206266 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerStarted","Data":"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"}
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.218550 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"36af0f53-11a4-47a8-9361-acde52280271","Type":"ContainerStarted","Data":"1ce82b39fbf3fa10a3b9a0a5e5669d7bf554b2796ff00f9862a94e68fce5f5a5"}
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221051 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerStarted","Data":"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"}
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221269 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-central-agent" containerID="cri-o://64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64" gracePeriod=30
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221353 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221377 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-notification-agent" containerID="cri-o://1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95" gracePeriod=30
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221380 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="sg-core" containerID="cri-o://f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805" gracePeriod=30
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.221482 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="proxy-httpd" containerID="cri-o://7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274" gracePeriod=30
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.246567 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.246549456 podStartE2EDuration="3.246549456s" podCreationTimestamp="2025-12-05 19:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:28.24037229 +0000 UTC m=+1367.122258285" watchObservedRunningTime="2025-12-05 19:36:28.246549456 +0000 UTC m=+1367.128435451"
Dec 05 19:36:28 crc kubenswrapper[4982]: I1205 19:36:28.272342 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.971377805 podStartE2EDuration="5.272318418s" podCreationTimestamp="2025-12-05 19:36:23 +0000 UTC" firstStartedPulling="2025-12-05 19:36:24.253698633 +0000 UTC m=+1363.135584628" lastFinishedPulling="2025-12-05 19:36:27.554639246 +0000 UTC m=+1366.436525241" observedRunningTime="2025-12-05 19:36:28.260618862 +0000 UTC m=+1367.142504857" watchObservedRunningTime="2025-12-05 19:36:28.272318418 +0000 UTC m=+1367.154204433"
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235179 4982 generic.go:334] "Generic (PLEG): container finished" podID="521ad732-0bb0-4030-8096-b38717ff158b" containerID="7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274" exitCode=0
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235506 4982 generic.go:334] "Generic (PLEG): container finished" podID="521ad732-0bb0-4030-8096-b38717ff158b" containerID="f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805" exitCode=2
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerDied","Data":"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"}
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235547 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerDied","Data":"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"}
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235585 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerDied","Data":"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"}
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.235518 4982 generic.go:334] "Generic (PLEG): container finished" podID="521ad732-0bb0-4030-8096-b38717ff158b" containerID="1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95" exitCode=0
Dec 05 19:36:29 crc kubenswrapper[4982]: I1205 19:36:29.997106 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.000722 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020083 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020218 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020260 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020287 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzp99\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020319 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020348 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020374 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020421 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020479 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020511 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020557 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020593 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020631 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle\") pod \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\" (UID: \"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.020661 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml\") pod \"521ad732-0bb0-4030-8096-b38717ff158b\" (UID: \"521ad732-0bb0-4030-8096-b38717ff158b\") "
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.021026 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs" (OuterVolumeSpecName: "logs") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.021076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.021456 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.021483 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-logs\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.021483 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.054696 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts" (OuterVolumeSpecName: "scripts") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.054839 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72" (OuterVolumeSpecName: "kube-api-access-2lt72") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "kube-api-access-2lt72". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.055218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99" (OuterVolumeSpecName: "kube-api-access-qzp99") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "kube-api-access-qzp99". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.055288 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts" (OuterVolumeSpecName: "scripts") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.056170 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs" (OuterVolumeSpecName: "certs") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.056405 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.083261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.099115 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130183 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/521ad732-0bb0-4030-8096-b38717ff158b-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130217 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzp99\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-kube-api-access-qzp99\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130229 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130247 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130256 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2lt72\" (UniqueName: \"kubernetes.io/projected/521ad732-0bb0-4030-8096-b38717ff158b-kube-api-access-2lt72\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130266 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-certs\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130278 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130287 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.130296 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.141244 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data" (OuterVolumeSpecName: "config-data") pod "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" (UID: "d64e387d-a88a-4b3e-ad6c-b7660aa61ee2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.158725 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.195303 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data" (OuterVolumeSpecName: "config-data") pod "521ad732-0bb0-4030-8096-b38717ff158b" (UID: "521ad732-0bb0-4030-8096-b38717ff158b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.232462 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.232696 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.232757 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/521ad732-0bb0-4030-8096-b38717ff158b-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.247858 4982 generic.go:334] "Generic (PLEG): container finished" podID="521ad732-0bb0-4030-8096-b38717ff158b" containerID="64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64" exitCode=0
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.248663 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerDied","Data":"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"}
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.248743 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"521ad732-0bb0-4030-8096-b38717ff158b","Type":"ContainerDied","Data":"6d56a6dcdf7906533ee963aff3834975d76d541489ea5736fa9f3deea4fb927a"}
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.248839 4982 scope.go:117] "RemoveContainer" containerID="7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.249016 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.251622 4982 generic.go:334] "Generic (PLEG): container finished" podID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerID="7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4" exitCode=137
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.251665 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerDied","Data":"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4"}
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.251692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"d64e387d-a88a-4b3e-ad6c-b7660aa61ee2","Type":"ContainerDied","Data":"dc14543fdd1cfa237c1e34e4a3f35ec4fa4217c4ba296436b98e0e78fa59fc6d"}
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.251727 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.289918 4982 scope.go:117] "RemoveContainer" containerID="f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.299012 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.305107 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.312786 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313353 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="proxy-httpd"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313377 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="proxy-httpd"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313405 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-central-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313414 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-central-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313429 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="sg-core"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313437 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="sg-core"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313454 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api-log"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313463 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api-log"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313484 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-notification-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313494 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-notification-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.313529 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313537 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313739 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api-log"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313761 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-central-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313778 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="proxy-httpd"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313799 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="ceilometer-notification-agent"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313812 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="521ad732-0bb0-4030-8096-b38717ff158b" containerName="sg-core"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.313830 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" containerName="cloudkitty-api"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.315105 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.324937 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.333630 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335087 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch48x\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335138 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335300 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335345 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335398 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335437 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335643 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.335679 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.340137 4982 scope.go:117] "RemoveContainer" containerID="1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.340708 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.340782 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.340920 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.346462 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.364984 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.367673 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.370040 4982 scope.go:117] "RemoveContainer" containerID="64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.370448 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.370667 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.377848 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.423794 4982 scope.go:117] "RemoveContainer" containerID="7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.424593 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274\": container with ID starting with 7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274 not found: ID does not exist" containerID="7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.424622 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274"} err="failed to get container status \"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274\": rpc error: code = NotFound desc = could not find container \"7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274\": container with ID starting with 7960e73689b6be85a62213d83135d8f313f73f5eee450135406e25574ed8b274 not found: ID does not exist"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.424647 4982 scope.go:117] "RemoveContainer" containerID="f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.424930 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805\": container with ID starting with f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805 not found: ID does not exist" containerID="f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.424953 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805"} err="failed to get container status \"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805\": rpc error: code = NotFound desc = could not find container \"f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805\": container with ID starting with f8f0d405cffdec5b2df24eae27041b78f13a123e5c65b06756ae69f1bea64805 not found: ID does not exist"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.424974 4982 scope.go:117] "RemoveContainer" containerID="1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.425507 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95\": container with ID starting with 1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95 not found: ID does not exist" containerID="1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.425530 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95"} err="failed to get container status \"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95\": rpc error: code = NotFound desc = could not find container \"1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95\": container with ID starting with 1328bcfd844e18894a15f3200a2797a3cc5de9b0a2d26fbee687cd601ceb1a95 not found: ID does not exist"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.425543 4982 scope.go:117] "RemoveContainer" containerID="64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"
Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.430562 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64\": container with ID starting with 64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64 not found: ID does not exist" containerID="64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.430584 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64"} err="failed to get container status \"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64\": rpc error: code = NotFound desc = could not find container \"64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64\": container with ID starting with 64f7e0f60e62762bd7549c1fcec057e18e1322f9b563aac4584e08b3df488e64 not found: ID does not exist"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.430600 4982 scope.go:117] "RemoveContainer" containerID="7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437757 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437814 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437855 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437878 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437897 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437947 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.437982 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438074 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d86k5\" (UniqueName: \"kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438168 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438217 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438239 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438293 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch48x\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438328 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.438363 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.439852 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.444667 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.444760 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.444832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.445168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.445565 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.445853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.447108 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.455048 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-fc787d555-k2pps"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.455467 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-fc787d555-k2pps"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.457287 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch48x\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x\") pod \"cloudkitty-api-0\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " pod="openstack/cloudkitty-api-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.468432 4982 scope.go:117] "RemoveContainer" containerID="a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.541963 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d86k5\" (UniqueName: \"kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542016 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0"
Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542056 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data\") pod
\"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542306 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542356 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542380 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.542403 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.544527 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.546084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.548528 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.549417 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.550044 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.559550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " 
pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.564951 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d86k5\" (UniqueName: \"kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5\") pod \"ceilometer-0\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " pod="openstack/ceilometer-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.565212 4982 scope.go:117] "RemoveContainer" containerID="7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4" Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.565640 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4\": container with ID starting with 7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4 not found: ID does not exist" containerID="7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.565684 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4"} err="failed to get container status \"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4\": rpc error: code = NotFound desc = could not find container \"7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4\": container with ID starting with 7b3a16020457741a6608879b3e019f383d9b39adec8310db473d8ca201da95f4 not found: ID does not exist" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.565711 4982 scope.go:117] "RemoveContainer" containerID="a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c" Dec 05 19:36:30 crc kubenswrapper[4982]: E1205 19:36:30.566500 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c\": container with ID starting with a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c not found: ID does not exist" containerID="a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.566538 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c"} err="failed to get container status \"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c\": rpc error: code = NotFound desc = could not find container \"a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c\": container with ID starting with a73f669cf4f639b952cf63bc5f42a3c10c7309bca6f214d9722afef69e84d84c not found: ID does not exist" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.668292 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:36:30 crc kubenswrapper[4982]: I1205 19:36:30.694085 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.163847 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:36:31 crc kubenswrapper[4982]: W1205 19:36:31.167993 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d605444_dcdf_4df1_ac2a_3b821d07d390.slice/crio-12eac0c79b931dead3e375d7b153b536b1bfad76d008632a651ef36f9c9aab54 WatchSource:0}: Error finding container 12eac0c79b931dead3e375d7b153b536b1bfad76d008632a651ef36f9c9aab54: Status 404 returned error can't find the container with id 12eac0c79b931dead3e375d7b153b536b1bfad76d008632a651ef36f9c9aab54 Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.198132 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:31 crc kubenswrapper[4982]: W1205 19:36:31.198279 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41c56561_9d2d_4066_a4c1_97bd1ec47965.slice/crio-a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b WatchSource:0}: Error finding container a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b: Status 404 returned error can't find the container with id a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.261780 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerStarted","Data":"a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b"} Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.271594 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerStarted","Data":"12eac0c79b931dead3e375d7b153b536b1bfad76d008632a651ef36f9c9aab54"} Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.407903 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="521ad732-0bb0-4030-8096-b38717ff158b" path="/var/lib/kubelet/pods/521ad732-0bb0-4030-8096-b38717ff158b/volumes" Dec 05 19:36:31 crc kubenswrapper[4982]: I1205 19:36:31.409211 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d64e387d-a88a-4b3e-ad6c-b7660aa61ee2" path="/var/lib/kubelet/pods/d64e387d-a88a-4b3e-ad6c-b7660aa61ee2/volumes" Dec 05 19:36:32 crc kubenswrapper[4982]: I1205 19:36:32.367714 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerStarted","Data":"3c5c6b38f7d8a96d87cf298d0d83c8935c9ef46e44a1d63d7de777817eda4e99"} Dec 05 19:36:32 crc kubenswrapper[4982]: I1205 19:36:32.392558 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerStarted","Data":"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6"} Dec 05 19:36:32 crc kubenswrapper[4982]: I1205 19:36:32.392603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerStarted","Data":"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf"} Dec 05 19:36:32 crc kubenswrapper[4982]: I1205 19:36:32.393674 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 05 19:36:32 crc kubenswrapper[4982]: I1205 19:36:32.443967 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.443940466 podStartE2EDuration="2.443940466s" podCreationTimestamp="2025-12-05 19:36:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:32.437596605 +0000 UTC m=+1371.319482600" watchObservedRunningTime="2025-12-05 19:36:32.443940466 +0000 UTC m=+1371.325826461" Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.406442 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerStarted","Data":"c0ddb2f449db60e1d95e0afea07e0839af1496fea3b5f68d3503163e2c7b1184"} Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.407016 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerStarted","Data":"01ae03145b267388793b89030fca5a97ed9a535ccf57f30bf43a4f557471276b"} Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.580346 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.595214 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.595298 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.644014 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 19:36:33 crc kubenswrapper[4982]: I1205 19:36:33.657324 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 19:36:34 crc kubenswrapper[4982]: I1205 19:36:34.418451 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:36:34 crc kubenswrapper[4982]: I1205 19:36:34.419576 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.456543 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerStarted","Data":"512ccba067ae54033baaff360d39aa1504dc05153ed0c35108c24328445c0909"} Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.456901 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-central-agent" containerID="cri-o://3c5c6b38f7d8a96d87cf298d0d83c8935c9ef46e44a1d63d7de777817eda4e99" gracePeriod=30 Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.457546 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="proxy-httpd" containerID="cri-o://512ccba067ae54033baaff360d39aa1504dc05153ed0c35108c24328445c0909" gracePeriod=30 Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.457595 4982 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.457611 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="sg-core" containerID="cri-o://c0ddb2f449db60e1d95e0afea07e0839af1496fea3b5f68d3503163e2c7b1184" gracePeriod=30 Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.457643 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-notification-agent" containerID="cri-o://01ae03145b267388793b89030fca5a97ed9a535ccf57f30bf43a4f557471276b" gracePeriod=30 Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.550925 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.552053 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.601118 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.617350 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:35 crc kubenswrapper[4982]: I1205 19:36:35.645998 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.1367612830000002 podStartE2EDuration="5.645978166s" podCreationTimestamp="2025-12-05 19:36:30 +0000 UTC" firstStartedPulling="2025-12-05 19:36:31.200796667 +0000 UTC m=+1370.082682662" lastFinishedPulling="2025-12-05 19:36:34.71001355 +0000 UTC m=+1373.591899545" observedRunningTime="2025-12-05 19:36:35.488674595 +0000 UTC m=+1374.370560590" watchObservedRunningTime="2025-12-05 19:36:35.645978166 +0000 UTC m=+1374.527864161" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.469754 4982 generic.go:334] "Generic (PLEG): container finished" podID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerID="512ccba067ae54033baaff360d39aa1504dc05153ed0c35108c24328445c0909" exitCode=0 Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470110 4982 generic.go:334] "Generic (PLEG): container finished" podID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerID="c0ddb2f449db60e1d95e0afea07e0839af1496fea3b5f68d3503163e2c7b1184" exitCode=2 Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470122 4982 generic.go:334] "Generic (PLEG): container finished" podID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerID="01ae03145b267388793b89030fca5a97ed9a535ccf57f30bf43a4f557471276b" exitCode=0 Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470130 4982 generic.go:334] "Generic (PLEG): container finished" podID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerID="3c5c6b38f7d8a96d87cf298d0d83c8935c9ef46e44a1d63d7de777817eda4e99" exitCode=0 Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.469772 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerDied","Data":"512ccba067ae54033baaff360d39aa1504dc05153ed0c35108c24328445c0909"} Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerDied","Data":"c0ddb2f449db60e1d95e0afea07e0839af1496fea3b5f68d3503163e2c7b1184"} Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470210 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerDied","Data":"01ae03145b267388793b89030fca5a97ed9a535ccf57f30bf43a4f557471276b"} Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470219 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerDied","Data":"3c5c6b38f7d8a96d87cf298d0d83c8935c9ef46e44a1d63d7de777817eda4e99"} Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470228 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"41c56561-9d2d-4066-a4c1-97bd1ec47965","Type":"ContainerDied","Data":"a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b"} Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470237 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0cd6e989974c8f28b8a46e5d5eca5bad5dec6736e10e38f591848bbd448932b" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470431 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470447 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470497 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.470526 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.510977 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.518501 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.631345 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.669968 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670023 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670061 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d86k5\" (UniqueName: \"kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670094 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670191 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670293 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670365 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd\") pod \"41c56561-9d2d-4066-a4c1-97bd1ec47965\" (UID: \"41c56561-9d2d-4066-a4c1-97bd1ec47965\") " Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670587 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.670836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.671033 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.671049 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/41c56561-9d2d-4066-a4c1-97bd1ec47965-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.676492 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts" (OuterVolumeSpecName: "scripts") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.694468 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5" (OuterVolumeSpecName: "kube-api-access-d86k5") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "kube-api-access-d86k5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.708738 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.775087 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d86k5\" (UniqueName: \"kubernetes.io/projected/41c56561-9d2d-4066-a4c1-97bd1ec47965-kube-api-access-d86k5\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.775138 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.775174 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.815357 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.830265 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data" (OuterVolumeSpecName: "config-data") pod "41c56561-9d2d-4066-a4c1-97bd1ec47965" (UID: "41c56561-9d2d-4066-a4c1-97bd1ec47965"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.876890 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:36 crc kubenswrapper[4982]: I1205 19:36:36.876917 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41c56561-9d2d-4066-a4c1-97bd1ec47965-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.478252 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.504641 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.518830 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.529973 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:37 crc kubenswrapper[4982]: E1205 19:36:37.530372 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="proxy-httpd" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530389 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="proxy-httpd" Dec 05 19:36:37 crc kubenswrapper[4982]: E1205 19:36:37.530403 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-notification-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530409 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-notification-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: E1205 19:36:37.530455 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="sg-core" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530462 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="sg-core" Dec 05 19:36:37 crc kubenswrapper[4982]: E1205 19:36:37.530473 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-central-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530479 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-central-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530649 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="proxy-httpd" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530662 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="sg-core" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530674 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-notification-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.530690 4982 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" containerName="ceilometer-central-agent" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.533108 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.537834 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.537912 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.545980 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589000 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589085 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589221 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhhsg\" (UniqueName: \"kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589290 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589356 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589448 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.589495 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.682307 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:37 crc kubenswrapper[4982]: E1205 19:36:37.683041 
4982 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[combined-ca-bundle config-data kube-api-access-qhhsg log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692000 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692059 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692183 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692217 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhhsg\" (UniqueName: \"kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692285 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692821 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.692885 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.698798 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.700502 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.701663 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.702719 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:37 crc kubenswrapper[4982]: I1205 19:36:37.717119 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhhsg\" (UniqueName: \"kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg\") pod \"ceilometer-0\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " pod="openstack/ceilometer-0" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.486881 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.498014 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.498388 4982 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.503262 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.615736 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.637643 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.637707 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qhhsg\" (UniqueName: \"kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.637832 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.637898 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.637964 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.638006 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.638091 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd\") pod \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\" (UID: \"e92e6fcc-f45e-4186-99a4-1b2e9fde07c7\") " Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.657507 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg" (OuterVolumeSpecName: "kube-api-access-qhhsg") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "kube-api-access-qhhsg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.661495 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.661507 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.666382 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data" (OuterVolumeSpecName: "config-data") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.685064 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.685801 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.690394 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts" (OuterVolumeSpecName: "scripts") pod "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" (UID: "e92e6fcc-f45e-4186-99a4-1b2e9fde07c7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740468 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740586 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740660 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qhhsg\" (UniqueName: \"kubernetes.io/projected/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-kube-api-access-qhhsg\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740723 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740777 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740830 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:38 crc kubenswrapper[4982]: I1205 19:36:38.740881 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.405216 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41c56561-9d2d-4066-a4c1-97bd1ec47965" path="/var/lib/kubelet/pods/41c56561-9d2d-4066-a4c1-97bd1ec47965/volumes" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.494588 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.563224 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.575669 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.590406 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.593626 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.596289 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.596554 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.604040 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763062 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763199 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763296 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5xl8\" (UniqueName: \"kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763469 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763499 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.763557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.864879 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0" Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 
19:36:39.865170 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.865213 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.865270 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.865327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.865367 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.865389 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5xl8\" (UniqueName: \"kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.866712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.866799 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.870796 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.870937 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.872281 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.874307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.886345 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5xl8\" (UniqueName: \"kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8\") pod \"ceilometer-0\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") " pod="openstack/ceilometer-0"
Dec 05 19:36:39 crc kubenswrapper[4982]: I1205 19:36:39.921450 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:40 crc kubenswrapper[4982]: I1205 19:36:40.428400 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:40 crc kubenswrapper[4982]: W1205 19:36:40.430779 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3dfa6d7_fb5f_4201_9aec_059f99afd349.slice/crio-29152e80117dd73e336f2fa06b5954fcd943f26b2a6aaeecd8c3638c04960e74 WatchSource:0}: Error finding container 29152e80117dd73e336f2fa06b5954fcd943f26b2a6aaeecd8c3638c04960e74: Status 404 returned error can't find the container with id 29152e80117dd73e336f2fa06b5954fcd943f26b2a6aaeecd8c3638c04960e74
Dec 05 19:36:40 crc kubenswrapper[4982]: I1205 19:36:40.506258 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerStarted","Data":"29152e80117dd73e336f2fa06b5954fcd943f26b2a6aaeecd8c3638c04960e74"}
Dec 05 19:36:41 crc kubenswrapper[4982]: I1205 19:36:41.409439 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e92e6fcc-f45e-4186-99a4-1b2e9fde07c7" path="/var/lib/kubelet/pods/e92e6fcc-f45e-4186-99a4-1b2e9fde07c7/volumes"
Dec 05 19:36:41 crc kubenswrapper[4982]: I1205 19:36:41.517869 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerStarted","Data":"195b6209253e5be3bc7bc830fe74a54301f6f113885bd4e71b56675cd2925e0b"}
Dec 05 19:36:42 crc kubenswrapper[4982]: I1205 19:36:42.531917 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerStarted","Data":"cc36a52b706cb06e22e51d300df835dd449c9786a7c6bd6d16479e17d6f647cb"}
Dec 05 19:36:42 crc kubenswrapper[4982]: I1205 19:36:42.557386 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:36:42 crc kubenswrapper[4982]: I1205 19:36:42.557435 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:36:44 crc kubenswrapper[4982]: I1205 19:36:44.552203 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerStarted","Data":"c027ef0c3e0989d20fba6179f6f2c7d14a05e24df9f8f36f53ae346d843642d4"}
Dec 05 19:36:44 crc kubenswrapper[4982]: I1205 19:36:44.796482 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.562019 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerStarted","Data":"b588778077257b827b9c10ac2621c6a6bfb5f5f1663710098109c40ef09babc3"}
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.563058 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.562908 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="proxy-httpd" containerID="cri-o://b588778077257b827b9c10ac2621c6a6bfb5f5f1663710098109c40ef09babc3" gracePeriod=30
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.562929 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="sg-core" containerID="cri-o://c027ef0c3e0989d20fba6179f6f2c7d14a05e24df9f8f36f53ae346d843642d4" gracePeriod=30
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.562944 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-notification-agent" containerID="cri-o://cc36a52b706cb06e22e51d300df835dd449c9786a7c6bd6d16479e17d6f647cb" gracePeriod=30
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.562383 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-central-agent" containerID="cri-o://195b6209253e5be3bc7bc830fe74a54301f6f113885bd4e71b56675cd2925e0b" gracePeriod=30
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.592461 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.288984607 podStartE2EDuration="6.592437488s" podCreationTimestamp="2025-12-05 19:36:39 +0000 UTC" firstStartedPulling="2025-12-05 19:36:40.433943119 +0000 UTC m=+1379.315829104" lastFinishedPulling="2025-12-05 19:36:44.73739599 +0000 UTC m=+1383.619281985" observedRunningTime="2025-12-05 19:36:45.585961424 +0000 UTC m=+1384.467847429" watchObservedRunningTime="2025-12-05 19:36:45.592437488 +0000 UTC m=+1384.474323493"
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.907690 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-blhbk"]
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.909242 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.923081 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-blhbk"]
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.995939 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-t884j"]
Dec 05 19:36:45 crc kubenswrapper[4982]: I1205 19:36:45.997700 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.010021 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-t884j"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.087270 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rn9xf\" (UniqueName: \"kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.087533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.094806 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-9d07-account-create-update-sn9k2"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.096104 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.101377 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.105958 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9d07-account-create-update-sn9k2"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.189853 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbk49\" (UniqueName: \"kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.189949 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.189985 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rn9xf\" (UniqueName: \"kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.190028 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.190924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.200979 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-tj2sp"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.202627 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.214380 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tj2sp"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.235168 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rn9xf\" (UniqueName: \"kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf\") pod \"nova-api-db-create-blhbk\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") " pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.267210 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.292004 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbk49\" (UniqueName: \"kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.292067 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.292131 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.292275 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-646cl\" (UniqueName: \"kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.292964 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.305409 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-43bb-account-create-update-lt54d"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.307217 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.312390 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.325728 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbk49\" (UniqueName: \"kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49\") pod \"nova-cell0-db-create-t884j\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") " pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.333403 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-43bb-account-create-update-lt54d"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395063 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6mss\" (UniqueName: \"kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395107 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-646cl\" (UniqueName: \"kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395278 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395470 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.395497 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8fb86\" (UniqueName: \"kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.401838 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.418822 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-646cl\" (UniqueName: \"kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl\") pod \"nova-api-9d07-account-create-update-sn9k2\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") " pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.423500 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.499316 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.499525 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fb86\" (UniqueName: \"kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.499790 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6mss\" (UniqueName: \"kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.499857 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.500602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.500695 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.506505 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f4b4-account-create-update-6r4nb"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.507777 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.519515 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.521193 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f4b4-account-create-update-6r4nb"]
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.524638 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6mss\" (UniqueName: \"kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss\") pod \"nova-cell1-db-create-tj2sp\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") " pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.525619 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fb86\" (UniqueName: \"kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86\") pod \"nova-cell0-43bb-account-create-update-lt54d\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") " pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592380 4982 generic.go:334] "Generic (PLEG): container finished" podID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerID="b588778077257b827b9c10ac2621c6a6bfb5f5f1663710098109c40ef09babc3" exitCode=0
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592417 4982 generic.go:334] "Generic (PLEG): container finished" podID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerID="c027ef0c3e0989d20fba6179f6f2c7d14a05e24df9f8f36f53ae346d843642d4" exitCode=2
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592428 4982 generic.go:334] "Generic (PLEG): container finished" podID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerID="cc36a52b706cb06e22e51d300df835dd449c9786a7c6bd6d16479e17d6f647cb" exitCode=0
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592453 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerDied","Data":"b588778077257b827b9c10ac2621c6a6bfb5f5f1663710098109c40ef09babc3"}
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592483 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerDied","Data":"c027ef0c3e0989d20fba6179f6f2c7d14a05e24df9f8f36f53ae346d843642d4"}
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.592495 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerDied","Data":"cc36a52b706cb06e22e51d300df835dd449c9786a7c6bd6d16479e17d6f647cb"}
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.602508 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.620958 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.706274 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl4ws\" (UniqueName: \"kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.706437 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.707544 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.717622 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.810845 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl4ws\" (UniqueName: \"kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.819655 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.828801 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl4ws\" (UniqueName: \"kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws\") pod \"nova-cell1-f4b4-account-create-update-6r4nb\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") " pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.838627 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.877649 4982 scope.go:117] "RemoveContainer" containerID="c71621a537833cf68acfd24b611e3de09fb0b72fc693901f85097a8ca2a6aa3f"
Dec 05 19:36:46 crc kubenswrapper[4982]: I1205 19:36:46.879815 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-blhbk"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.088866 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-9d07-account-create-update-sn9k2"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.094325 4982 scope.go:117] "RemoveContainer" containerID="17e0a975942ff2ade2d8b7c24fb58353db92fa31cfb1b7b44701cb6867944358"
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.143122 4982 scope.go:117] "RemoveContainer" containerID="32cb2d1388ca574f7983e485b64bb82922046870d837981772cdbd71426c780a"
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.288632 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-t884j"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.444634 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-43bb-account-create-update-lt54d"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.565622 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f4b4-account-create-update-6r4nb"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.587847 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-tj2sp"]
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.607993 4982 generic.go:334] "Generic (PLEG): container finished" podID="e2a57203-aa68-4b31-96ea-e522f4daf205" containerID="705c84edcdd57f073ad0a5a426a15b732a6d7cd93c9a2e89368fa4172af23652" exitCode=0
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.608095 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-blhbk" event={"ID":"e2a57203-aa68-4b31-96ea-e522f4daf205","Type":"ContainerDied","Data":"705c84edcdd57f073ad0a5a426a15b732a6d7cd93c9a2e89368fa4172af23652"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.608127 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-blhbk" event={"ID":"e2a57203-aa68-4b31-96ea-e522f4daf205","Type":"ContainerStarted","Data":"492c06d63bcd91fffb6ced66b48ae580784f22c0731fb94f4f0634e4126af681"}
Dec 05 19:36:47 crc kubenswrapper[4982]: W1205 19:36:47.609628 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb70fe631_4513_4d09_9122_50730c5fe397.slice/crio-b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d WatchSource:0}: Error finding container b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d: Status 404 returned error can't find the container with id b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.613006 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb" event={"ID":"d0b8f169-dd34-48eb-b55f-7bebeaac5abf","Type":"ContainerStarted","Data":"a2a4b66421e9c750e6647c33b56eb7d9c46692ca04e841dde0b049bb3f11bd54"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.617888 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-43bb-account-create-update-lt54d" event={"ID":"cb311798-8f2d-4a9a-92fa-3e72f0032912","Type":"ContainerStarted","Data":"9de12768c2b74a5d02efb54f015b222002314575306441f019c7719058b19e33"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.631838 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-t884j" event={"ID":"81d13df7-6d0f-4034-8186-f6f05e3f15c3","Type":"ContainerStarted","Data":"89ee36043206e58b6f3ad8b72b084f2c3567bac77a4a47185f596e654f8eea6e"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.631880 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-t884j" event={"ID":"81d13df7-6d0f-4034-8186-f6f05e3f15c3","Type":"ContainerStarted","Data":"aec59064fd80915bb17bfb2791cd1af507d93800dab7a85224491554e5db5d46"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.636205 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9d07-account-create-update-sn9k2" event={"ID":"86e5389c-d500-4b5e-a7f6-e070b2f64179","Type":"ContainerStarted","Data":"b570e0285b09030ee506c6932baf9c8b048b87430c8f2360a02198de12621036"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.636246 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9d07-account-create-update-sn9k2" event={"ID":"86e5389c-d500-4b5e-a7f6-e070b2f64179","Type":"ContainerStarted","Data":"649d207b227ba7455456604d9f98667dfbe8515018492e3987f55cd3ef7fc565"}
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.653722 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-t884j" podStartSLOduration=2.653705419 podStartE2EDuration="2.653705419s" podCreationTimestamp="2025-12-05 19:36:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:47.64819744 +0000 UTC m=+1386.530083435" watchObservedRunningTime="2025-12-05 19:36:47.653705419 +0000 UTC m=+1386.535591414"
Dec 05 19:36:47 crc kubenswrapper[4982]: I1205 19:36:47.673484 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-9d07-account-create-update-sn9k2" podStartSLOduration=1.6734684990000002 podStartE2EDuration="1.673468499s" podCreationTimestamp="2025-12-05 19:36:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:36:47.670993087 +0000 UTC m=+1386.552879082" watchObservedRunningTime="2025-12-05 19:36:47.673468499 +0000 UTC m=+1386.555354494"
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.647174 4982 generic.go:334] "Generic (PLEG): container finished" podID="81d13df7-6d0f-4034-8186-f6f05e3f15c3" containerID="89ee36043206e58b6f3ad8b72b084f2c3567bac77a4a47185f596e654f8eea6e" exitCode=0
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.647480 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-t884j" event={"ID":"81d13df7-6d0f-4034-8186-f6f05e3f15c3","Type":"ContainerDied","Data":"89ee36043206e58b6f3ad8b72b084f2c3567bac77a4a47185f596e654f8eea6e"}
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.649908 4982 generic.go:334] "Generic (PLEG): container finished" podID="d0b8f169-dd34-48eb-b55f-7bebeaac5abf" containerID="11f17c71169e9f84bc8e277f6dd5d156260dabd71381c586fbdb857c18ca6dcd" exitCode=0
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.650039 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb" event={"ID":"d0b8f169-dd34-48eb-b55f-7bebeaac5abf","Type":"ContainerDied","Data":"11f17c71169e9f84bc8e277f6dd5d156260dabd71381c586fbdb857c18ca6dcd"}
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.651333 4982 generic.go:334] "Generic (PLEG): container finished" podID="86e5389c-d500-4b5e-a7f6-e070b2f64179" containerID="b570e0285b09030ee506c6932baf9c8b048b87430c8f2360a02198de12621036" exitCode=0
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.651379 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9d07-account-create-update-sn9k2" event={"ID":"86e5389c-d500-4b5e-a7f6-e070b2f64179","Type":"ContainerDied","Data":"b570e0285b09030ee506c6932baf9c8b048b87430c8f2360a02198de12621036"}
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.652521 4982 generic.go:334] "Generic (PLEG): container finished" podID="b70fe631-4513-4d09-9122-50730c5fe397" containerID="e3088de0ff1ce25a8acb9bd7b737fbc2827ceb776a3890bd556fc75093141bc6" exitCode=0
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.652565 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tj2sp" event={"ID":"b70fe631-4513-4d09-9122-50730c5fe397","Type":"ContainerDied","Data":"e3088de0ff1ce25a8acb9bd7b737fbc2827ceb776a3890bd556fc75093141bc6"}
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.652595 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tj2sp" event={"ID":"b70fe631-4513-4d09-9122-50730c5fe397","Type":"ContainerStarted","Data":"b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d"}
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.653703 4982 generic.go:334] "Generic (PLEG): container finished" podID="cb311798-8f2d-4a9a-92fa-3e72f0032912" containerID="52855649d07f62bbab80802ca369507eb04bea20184fce99b1bf10363654a418" exitCode=0
Dec 05 19:36:48 crc kubenswrapper[4982]: I1205 19:36:48.653882 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-43bb-account-create-update-lt54d" event={"ID":"cb311798-8f2d-4a9a-92fa-3e72f0032912","Type":"ContainerDied","Data":"52855649d07f62bbab80802ca369507eb04bea20184fce99b1bf10363654a418"}
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.064443 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.156747 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts\") pod \"e2a57203-aa68-4b31-96ea-e522f4daf205\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") "
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.156946 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rn9xf\" (UniqueName: \"kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf\") pod \"e2a57203-aa68-4b31-96ea-e522f4daf205\" (UID: \"e2a57203-aa68-4b31-96ea-e522f4daf205\") "
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.157443 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2a57203-aa68-4b31-96ea-e522f4daf205" (UID: "e2a57203-aa68-4b31-96ea-e522f4daf205"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.157744 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2a57203-aa68-4b31-96ea-e522f4daf205-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.175800 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf" (OuterVolumeSpecName: "kube-api-access-rn9xf") pod "e2a57203-aa68-4b31-96ea-e522f4daf205" (UID: "e2a57203-aa68-4b31-96ea-e522f4daf205"). InnerVolumeSpecName "kube-api-access-rn9xf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.259518 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rn9xf\" (UniqueName: \"kubernetes.io/projected/e2a57203-aa68-4b31-96ea-e522f4daf205-kube-api-access-rn9xf\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.666620 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-blhbk"
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.667096 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-blhbk" event={"ID":"e2a57203-aa68-4b31-96ea-e522f4daf205","Type":"ContainerDied","Data":"492c06d63bcd91fffb6ced66b48ae580784f22c0731fb94f4f0634e4126af681"}
Dec 05 19:36:49 crc kubenswrapper[4982]: I1205 19:36:49.667119 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="492c06d63bcd91fffb6ced66b48ae580784f22c0731fb94f4f0634e4126af681"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.221704 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.389082 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts\") pod \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.389224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wl4ws\" (UniqueName: \"kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws\") pod \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\" (UID: \"d0b8f169-dd34-48eb-b55f-7bebeaac5abf\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.391704 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d0b8f169-dd34-48eb-b55f-7bebeaac5abf" (UID: "d0b8f169-dd34-48eb-b55f-7bebeaac5abf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.395738 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws" (OuterVolumeSpecName: "kube-api-access-wl4ws") pod "d0b8f169-dd34-48eb-b55f-7bebeaac5abf" (UID: "d0b8f169-dd34-48eb-b55f-7bebeaac5abf"). InnerVolumeSpecName "kube-api-access-wl4ws". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.492278 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.493073 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.493111 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wl4ws\" (UniqueName: \"kubernetes.io/projected/d0b8f169-dd34-48eb-b55f-7bebeaac5abf-kube-api-access-wl4ws\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.504970 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.533372 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.542788 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.593857 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts\") pod \"cb311798-8f2d-4a9a-92fa-3e72f0032912\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.594086 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h6mss\" (UniqueName: \"kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss\") pod \"b70fe631-4513-4d09-9122-50730c5fe397\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.594239 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbk49\" (UniqueName: \"kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49\") pod \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.594297 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts\") pod \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\" (UID: \"81d13df7-6d0f-4034-8186-f6f05e3f15c3\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.594323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts\") pod \"b70fe631-4513-4d09-9122-50730c5fe397\" (UID: \"b70fe631-4513-4d09-9122-50730c5fe397\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.594349 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8fb86\" (UniqueName: \"kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86\") pod \"cb311798-8f2d-4a9a-92fa-3e72f0032912\" (UID: \"cb311798-8f2d-4a9a-92fa-3e72f0032912\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.597020 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81d13df7-6d0f-4034-8186-f6f05e3f15c3" (UID: "81d13df7-6d0f-4034-8186-f6f05e3f15c3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.597035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b70fe631-4513-4d09-9122-50730c5fe397" (UID: "b70fe631-4513-4d09-9122-50730c5fe397"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.597100 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cb311798-8f2d-4a9a-92fa-3e72f0032912" (UID: "cb311798-8f2d-4a9a-92fa-3e72f0032912"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.602357 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49" (OuterVolumeSpecName: "kube-api-access-tbk49") pod "81d13df7-6d0f-4034-8186-f6f05e3f15c3" (UID: "81d13df7-6d0f-4034-8186-f6f05e3f15c3"). InnerVolumeSpecName "kube-api-access-tbk49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.608362 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss" (OuterVolumeSpecName: "kube-api-access-h6mss") pod "b70fe631-4513-4d09-9122-50730c5fe397" (UID: "b70fe631-4513-4d09-9122-50730c5fe397"). InnerVolumeSpecName "kube-api-access-h6mss". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.608745 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86" (OuterVolumeSpecName: "kube-api-access-8fb86") pod "cb311798-8f2d-4a9a-92fa-3e72f0032912" (UID: "cb311798-8f2d-4a9a-92fa-3e72f0032912"). InnerVolumeSpecName "kube-api-access-8fb86". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.677567 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-tj2sp" event={"ID":"b70fe631-4513-4d09-9122-50730c5fe397","Type":"ContainerDied","Data":"b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d"}
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.677604 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b33bbdcb5645601d7931a7ee2ac8ce865f85bdf0c1587fa2dca8149b47c6754d"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.677652 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-tj2sp"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.688968 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-43bb-account-create-update-lt54d" event={"ID":"cb311798-8f2d-4a9a-92fa-3e72f0032912","Type":"ContainerDied","Data":"9de12768c2b74a5d02efb54f015b222002314575306441f019c7719058b19e33"}
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.689038 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-43bb-account-create-update-lt54d"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.690553 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9de12768c2b74a5d02efb54f015b222002314575306441f019c7719058b19e33"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.690593 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-9d07-account-create-update-sn9k2" event={"ID":"86e5389c-d500-4b5e-a7f6-e070b2f64179","Type":"ContainerDied","Data":"649d207b227ba7455456604d9f98667dfbe8515018492e3987f55cd3ef7fc565"}
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.690608 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="649d207b227ba7455456604d9f98667dfbe8515018492e3987f55cd3ef7fc565"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.691719 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-t884j" event={"ID":"81d13df7-6d0f-4034-8186-f6f05e3f15c3","Type":"ContainerDied","Data":"aec59064fd80915bb17bfb2791cd1af507d93800dab7a85224491554e5db5d46"}
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.691741 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aec59064fd80915bb17bfb2791cd1af507d93800dab7a85224491554e5db5d46"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.691783 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-t884j"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.695924 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts\") pod \"86e5389c-d500-4b5e-a7f6-e070b2f64179\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.695969 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-646cl\" (UniqueName: \"kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl\") pod \"86e5389c-d500-4b5e-a7f6-e070b2f64179\" (UID: \"86e5389c-d500-4b5e-a7f6-e070b2f64179\") "
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696647 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "86e5389c-d500-4b5e-a7f6-e070b2f64179" (UID: "86e5389c-d500-4b5e-a7f6-e070b2f64179"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696808 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h6mss\" (UniqueName: \"kubernetes.io/projected/b70fe631-4513-4d09-9122-50730c5fe397-kube-api-access-h6mss\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696831 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbk49\" (UniqueName: \"kubernetes.io/projected/81d13df7-6d0f-4034-8186-f6f05e3f15c3-kube-api-access-tbk49\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696845 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81d13df7-6d0f-4034-8186-f6f05e3f15c3-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696857 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b70fe631-4513-4d09-9122-50730c5fe397-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696870 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8fb86\" (UniqueName: \"kubernetes.io/projected/cb311798-8f2d-4a9a-92fa-3e72f0032912-kube-api-access-8fb86\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.696882 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cb311798-8f2d-4a9a-92fa-3e72f0032912-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.699514 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl" (OuterVolumeSpecName: "kube-api-access-646cl") pod "86e5389c-d500-4b5e-a7f6-e070b2f64179" (UID: "86e5389c-d500-4b5e-a7f6-e070b2f64179"). InnerVolumeSpecName "kube-api-access-646cl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.702221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb" event={"ID":"d0b8f169-dd34-48eb-b55f-7bebeaac5abf","Type":"ContainerDied","Data":"a2a4b66421e9c750e6647c33b56eb7d9c46692ca04e841dde0b049bb3f11bd54"}
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.702276 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2a4b66421e9c750e6647c33b56eb7d9c46692ca04e841dde0b049bb3f11bd54"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.702348 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f4b4-account-create-update-6r4nb"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.707656 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-9d07-account-create-update-sn9k2"
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.798857 4982 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86e5389c-d500-4b5e-a7f6-e070b2f64179-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:50 crc kubenswrapper[4982]: I1205 19:36:50.798894 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-646cl\" (UniqueName: \"kubernetes.io/projected/86e5389c-d500-4b5e-a7f6-e070b2f64179-kube-api-access-646cl\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:52 crc kubenswrapper[4982]: I1205 19:36:52.725717 4982 generic.go:334] "Generic (PLEG): container finished" podID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerID="195b6209253e5be3bc7bc830fe74a54301f6f113885bd4e71b56675cd2925e0b" exitCode=0
Dec 05 19:36:52 crc kubenswrapper[4982]: I1205 19:36:52.726056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerDied","Data":"195b6209253e5be3bc7bc830fe74a54301f6f113885bd4e71b56675cd2925e0b"}
Dec 05 19:36:52 crc kubenswrapper[4982]: I1205 19:36:52.898602 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039197 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039404 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039758 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039842 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.039880 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.040006 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5xl8\" (UniqueName: \"kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.040060 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.040117 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle\") pod \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\" (UID: \"f3dfa6d7-fb5f-4201-9aec-059f99afd349\") "
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.041199 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.041246 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f3dfa6d7-fb5f-4201-9aec-059f99afd349-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.044554 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8" (OuterVolumeSpecName: "kube-api-access-n5xl8") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "kube-api-access-n5xl8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.045311 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts" (OuterVolumeSpecName: "scripts") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.078293 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.129431 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.143516 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.143554 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.143568 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5xl8\" (UniqueName: \"kubernetes.io/projected/f3dfa6d7-fb5f-4201-9aec-059f99afd349-kube-api-access-n5xl8\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.143580 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.159747 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data" (OuterVolumeSpecName: "config-data") pod "f3dfa6d7-fb5f-4201-9aec-059f99afd349" (UID: "f3dfa6d7-fb5f-4201-9aec-059f99afd349"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.245361 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f3dfa6d7-fb5f-4201-9aec-059f99afd349-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.740385 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f3dfa6d7-fb5f-4201-9aec-059f99afd349","Type":"ContainerDied","Data":"29152e80117dd73e336f2fa06b5954fcd943f26b2a6aaeecd8c3638c04960e74"}
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.740474 4982 scope.go:117] "RemoveContainer" containerID="b588778077257b827b9c10ac2621c6a6bfb5f5f1663710098109c40ef09babc3"
Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.740677 4982 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.776380 4982 scope.go:117] "RemoveContainer" containerID="c027ef0c3e0989d20fba6179f6f2c7d14a05e24df9f8f36f53ae346d843642d4" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.786470 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.810131 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.821269 4982 scope.go:117] "RemoveContainer" containerID="cc36a52b706cb06e22e51d300df835dd449c9786a7c6bd6d16479e17d6f647cb" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.836238 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.836832 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b70fe631-4513-4d09-9122-50730c5fe397" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.836857 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b70fe631-4513-4d09-9122-50730c5fe397" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.836882 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2a57203-aa68-4b31-96ea-e522f4daf205" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.836894 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2a57203-aa68-4b31-96ea-e522f4daf205" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.836925 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="sg-core" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.836935 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="sg-core" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.836963 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-central-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.836975 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-central-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.836992 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb311798-8f2d-4a9a-92fa-3e72f0032912" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837003 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb311798-8f2d-4a9a-92fa-3e72f0032912" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.837023 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0b8f169-dd34-48eb-b55f-7bebeaac5abf" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837035 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0b8f169-dd34-48eb-b55f-7bebeaac5abf" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.837055 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81d13df7-6d0f-4034-8186-f6f05e3f15c3" containerName="mariadb-database-create" Dec 05 19:36:53 
crc kubenswrapper[4982]: I1205 19:36:53.837067 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="81d13df7-6d0f-4034-8186-f6f05e3f15c3" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.837082 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86e5389c-d500-4b5e-a7f6-e070b2f64179" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837092 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="86e5389c-d500-4b5e-a7f6-e070b2f64179" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.837116 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="proxy-httpd" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837128 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="proxy-httpd" Dec 05 19:36:53 crc kubenswrapper[4982]: E1205 19:36:53.837176 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-notification-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837190 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-notification-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837508 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2a57203-aa68-4b31-96ea-e522f4daf205" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837528 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0b8f169-dd34-48eb-b55f-7bebeaac5abf" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837550 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b70fe631-4513-4d09-9122-50730c5fe397" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837573 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-notification-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837588 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="ceilometer-central-agent" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837604 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="proxy-httpd" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837618 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" containerName="sg-core" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837638 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="81d13df7-6d0f-4034-8186-f6f05e3f15c3" containerName="mariadb-database-create" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837650 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="86e5389c-d500-4b5e-a7f6-e070b2f64179" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.837666 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb311798-8f2d-4a9a-92fa-3e72f0032912" containerName="mariadb-account-create-update" Dec 05 19:36:53 crc 
kubenswrapper[4982]: I1205 19:36:53.840955 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.841903 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.844124 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.844359 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.859091 4982 scope.go:117] "RemoveContainer" containerID="195b6209253e5be3bc7bc830fe74a54301f6f113885bd4e71b56675cd2925e0b" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.960649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.960989 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.961228 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjt6s\" (UniqueName: \"kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.961372 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.961730 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.961917 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:53 crc kubenswrapper[4982]: I1205 19:36:53.962040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063641 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063701 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063745 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063789 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063851 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063918 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjt6s\" (UniqueName: \"kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.063938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.064267 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.064909 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.069025 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.072855 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.075916 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.081588 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.087333 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjt6s\" (UniqueName: \"kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s\") pod \"ceilometer-0\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") " pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.176614 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.657549 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:36:54 crc kubenswrapper[4982]: I1205 19:36:54.752001 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerStarted","Data":"f975babd9156d2d13a2b8434a063a698cbc7552f73c36a4f66b88f555a6a47e2"} Dec 05 19:36:55 crc kubenswrapper[4982]: I1205 19:36:55.403751 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3dfa6d7-fb5f-4201-9aec-059f99afd349" path="/var/lib/kubelet/pods/f3dfa6d7-fb5f-4201-9aec-059f99afd349/volumes" Dec 05 19:36:55 crc kubenswrapper[4982]: I1205 19:36:55.766623 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerStarted","Data":"5d43d5c4f0b661a529792083f6092739009e4251e48eaf34bf2620faf1ad3415"} Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.554144 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dpwvj"] Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.555909 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.559501 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.560029 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.562052 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vnlpb" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.570857 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dpwvj"] Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.622203 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvj2d\" (UniqueName: \"kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.622299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.622594 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.622639 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.725017 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.725384 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.725412 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: 
\"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.725532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvj2d\" (UniqueName: \"kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.733199 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.733264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.733460 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.746666 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvj2d\" (UniqueName: \"kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d\") pod \"nova-cell0-conductor-db-sync-dpwvj\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.778984 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerStarted","Data":"d7e847b92efc445148f09b89bcadd95565514df5a01a9ee6627c70c144e95420"} Dec 05 19:36:56 crc kubenswrapper[4982]: I1205 19:36:56.873462 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:36:57 crc kubenswrapper[4982]: W1205 19:36:57.382029 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod621c36ce_6cd3_4d73_8c51_c278a760bdad.slice/crio-c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7 WatchSource:0}: Error finding container c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7: Status 404 returned error can't find the container with id c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7 Dec 05 19:36:57 crc kubenswrapper[4982]: I1205 19:36:57.385795 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dpwvj"] Dec 05 19:36:57 crc kubenswrapper[4982]: I1205 19:36:57.794765 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerStarted","Data":"c7f8a8449c824c9c191ea4026acd2699c676cbc5e4f257d0151c7e77a1a60938"} Dec 05 19:36:57 crc kubenswrapper[4982]: I1205 19:36:57.796036 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" event={"ID":"621c36ce-6cd3-4d73-8c51-c278a760bdad","Type":"ContainerStarted","Data":"c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7"} Dec 05 19:36:58 crc kubenswrapper[4982]: I1205 19:36:58.808745 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerStarted","Data":"4de546e0e7c9b8887cab9701ed9d3fcc84a83edc41d9821a8770a520a67cc4bd"} Dec 05 19:36:58 crc kubenswrapper[4982]: I1205 19:36:58.810212 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:36:58 crc kubenswrapper[4982]: I1205 19:36:58.838483 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.182856719 podStartE2EDuration="5.838461037s" podCreationTimestamp="2025-12-05 19:36:53 +0000 UTC" firstStartedPulling="2025-12-05 19:36:54.658704246 +0000 UTC m=+1393.540590261" lastFinishedPulling="2025-12-05 19:36:58.314308584 +0000 UTC m=+1397.196194579" observedRunningTime="2025-12-05 19:36:58.823523949 +0000 UTC m=+1397.705409944" watchObservedRunningTime="2025-12-05 19:36:58.838461037 +0000 UTC m=+1397.720347032" Dec 05 19:37:06 crc kubenswrapper[4982]: I1205 19:37:06.884395 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" event={"ID":"621c36ce-6cd3-4d73-8c51-c278a760bdad","Type":"ContainerStarted","Data":"24864d8c2a496c525444d9d094c8553342163ec17200ded69e1df927a0a3f5cc"} Dec 05 19:37:08 crc kubenswrapper[4982]: I1205 19:37:08.148248 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 05 19:37:08 crc kubenswrapper[4982]: I1205 19:37:08.170247 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" podStartSLOduration=3.840540288 podStartE2EDuration="12.170221057s" podCreationTimestamp="2025-12-05 19:36:56 +0000 UTC" firstStartedPulling="2025-12-05 19:36:57.385934951 +0000 UTC m=+1396.267820946" lastFinishedPulling="2025-12-05 19:37:05.71561572 +0000 UTC m=+1404.597501715" observedRunningTime="2025-12-05 19:37:06.906926868 +0000 UTC m=+1405.788812863" watchObservedRunningTime="2025-12-05 
19:37:08.170221057 +0000 UTC m=+1407.052107062" Dec 05 19:37:12 crc kubenswrapper[4982]: I1205 19:37:12.557499 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:37:12 crc kubenswrapper[4982]: I1205 19:37:12.558066 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:37:16 crc kubenswrapper[4982]: I1205 19:37:16.031947 4982 generic.go:334] "Generic (PLEG): container finished" podID="621c36ce-6cd3-4d73-8c51-c278a760bdad" containerID="24864d8c2a496c525444d9d094c8553342163ec17200ded69e1df927a0a3f5cc" exitCode=0 Dec 05 19:37:16 crc kubenswrapper[4982]: I1205 19:37:16.032051 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" event={"ID":"621c36ce-6cd3-4d73-8c51-c278a760bdad","Type":"ContainerDied","Data":"24864d8c2a496c525444d9d094c8553342163ec17200ded69e1df927a0a3f5cc"} Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.460784 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.578453 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvj2d\" (UniqueName: \"kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d\") pod \"621c36ce-6cd3-4d73-8c51-c278a760bdad\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.579054 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle\") pod \"621c36ce-6cd3-4d73-8c51-c278a760bdad\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.579172 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts\") pod \"621c36ce-6cd3-4d73-8c51-c278a760bdad\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.579233 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data\") pod \"621c36ce-6cd3-4d73-8c51-c278a760bdad\" (UID: \"621c36ce-6cd3-4d73-8c51-c278a760bdad\") " Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.584187 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d" (OuterVolumeSpecName: "kube-api-access-jvj2d") pod "621c36ce-6cd3-4d73-8c51-c278a760bdad" (UID: "621c36ce-6cd3-4d73-8c51-c278a760bdad"). InnerVolumeSpecName "kube-api-access-jvj2d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.584810 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts" (OuterVolumeSpecName: "scripts") pod "621c36ce-6cd3-4d73-8c51-c278a760bdad" (UID: "621c36ce-6cd3-4d73-8c51-c278a760bdad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.612736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data" (OuterVolumeSpecName: "config-data") pod "621c36ce-6cd3-4d73-8c51-c278a760bdad" (UID: "621c36ce-6cd3-4d73-8c51-c278a760bdad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.617265 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "621c36ce-6cd3-4d73-8c51-c278a760bdad" (UID: "621c36ce-6cd3-4d73-8c51-c278a760bdad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.682846 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvj2d\" (UniqueName: \"kubernetes.io/projected/621c36ce-6cd3-4d73-8c51-c278a760bdad-kube-api-access-jvj2d\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.682883 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.682893 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:17 crc kubenswrapper[4982]: I1205 19:37:17.682902 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/621c36ce-6cd3-4d73-8c51-c278a760bdad-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.053416 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" event={"ID":"621c36ce-6cd3-4d73-8c51-c278a760bdad","Type":"ContainerDied","Data":"c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7"} Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.053481 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c79e40e13e0423a4bf5e0c8d2e1087d7f50d5a9f5fe1f8e2188e1d69268c07d7" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.053480 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-dpwvj" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.188878 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 19:37:18 crc kubenswrapper[4982]: E1205 19:37:18.189328 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="621c36ce-6cd3-4d73-8c51-c278a760bdad" containerName="nova-cell0-conductor-db-sync" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.189342 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="621c36ce-6cd3-4d73-8c51-c278a760bdad" containerName="nova-cell0-conductor-db-sync" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.189538 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="621c36ce-6cd3-4d73-8c51-c278a760bdad" containerName="nova-cell0-conductor-db-sync" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.190240 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.192337 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-vnlpb" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.193904 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.203399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.294814 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.294863 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.295667 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8h5d\" (UniqueName: \"kubernetes.io/projected/c7fa7720-ccc7-4ddf-8102-e10818187b20-kube-api-access-z8h5d\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.397682 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8h5d\" (UniqueName: \"kubernetes.io/projected/c7fa7720-ccc7-4ddf-8102-e10818187b20-kube-api-access-z8h5d\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.397750 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc 
kubenswrapper[4982]: I1205 19:37:18.397780 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.401564 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.410111 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7fa7720-ccc7-4ddf-8102-e10818187b20-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.416256 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8h5d\" (UniqueName: \"kubernetes.io/projected/c7fa7720-ccc7-4ddf-8102-e10818187b20-kube-api-access-z8h5d\") pod \"nova-cell0-conductor-0\" (UID: \"c7fa7720-ccc7-4ddf-8102-e10818187b20\") " pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.520491 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:18 crc kubenswrapper[4982]: I1205 19:37:18.969413 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 19:37:18 crc kubenswrapper[4982]: W1205 19:37:18.974778 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7fa7720_ccc7_4ddf_8102_e10818187b20.slice/crio-18061168d2375e37d21caf369f7d50acbc0b859d64694be90c3d735fcadc3372 WatchSource:0}: Error finding container 18061168d2375e37d21caf369f7d50acbc0b859d64694be90c3d735fcadc3372: Status 404 returned error can't find the container with id 18061168d2375e37d21caf369f7d50acbc0b859d64694be90c3d735fcadc3372 Dec 05 19:37:19 crc kubenswrapper[4982]: I1205 19:37:19.065980 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c7fa7720-ccc7-4ddf-8102-e10818187b20","Type":"ContainerStarted","Data":"18061168d2375e37d21caf369f7d50acbc0b859d64694be90c3d735fcadc3372"} Dec 05 19:37:20 crc kubenswrapper[4982]: I1205 19:37:20.091766 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"c7fa7720-ccc7-4ddf-8102-e10818187b20","Type":"ContainerStarted","Data":"a393682f8510e872d98111ad2f64f39decbfba95cb7988c1d5ef3c216ddd02c0"} Dec 05 19:37:20 crc kubenswrapper[4982]: I1205 19:37:20.092280 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:20 crc kubenswrapper[4982]: I1205 19:37:20.130033 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.130018737 podStartE2EDuration="2.130018737s" podCreationTimestamp="2025-12-05 19:37:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 19:37:20.129352561 +0000 UTC m=+1419.011238556" watchObservedRunningTime="2025-12-05 19:37:20.130018737 +0000 UTC m=+1419.011904732" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.058340 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"] Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.062824 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.072267 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"] Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.163657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfr8v\" (UniqueName: \"kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.163707 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.164034 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.266486 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.266651 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfr8v\" (UniqueName: \"kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.266685 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.267065 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.267139 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.285861 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfr8v\" (UniqueName: \"kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v\") pod \"redhat-operators-p5gbn\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") " pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.395416 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:21 crc kubenswrapper[4982]: I1205 19:37:21.928486 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"] Dec 05 19:37:21 crc kubenswrapper[4982]: W1205 19:37:21.929847 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode51a33e0_110e_405e_8979_eda2e527de19.slice/crio-222702bf285708d7b2cc17c0efd232c2692031a689d39548d75a31f45ecf7191 WatchSource:0}: Error finding container 222702bf285708d7b2cc17c0efd232c2692031a689d39548d75a31f45ecf7191: Status 404 returned error can't find the container with id 222702bf285708d7b2cc17c0efd232c2692031a689d39548d75a31f45ecf7191 Dec 05 19:37:22 crc kubenswrapper[4982]: I1205 19:37:22.110510 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerStarted","Data":"222702bf285708d7b2cc17c0efd232c2692031a689d39548d75a31f45ecf7191"} Dec 05 19:37:23 crc kubenswrapper[4982]: I1205 19:37:23.125125 4982 generic.go:334] "Generic (PLEG): container finished" podID="e51a33e0-110e-405e-8979-eda2e527de19" containerID="4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a" exitCode=0 Dec 05 19:37:23 crc kubenswrapper[4982]: I1205 19:37:23.125196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerDied","Data":"4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a"} Dec 05 19:37:24 crc kubenswrapper[4982]: I1205 19:37:24.146998 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerStarted","Data":"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"} Dec 05 19:37:24 crc kubenswrapper[4982]: I1205 19:37:24.784499 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 19:37:27 crc kubenswrapper[4982]: I1205 19:37:27.176930 4982 generic.go:334] "Generic (PLEG): container finished" podID="e51a33e0-110e-405e-8979-eda2e527de19" containerID="54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778" exitCode=0 Dec 05 19:37:27 crc kubenswrapper[4982]: I1205 19:37:27.177012 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" 
event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerDied","Data":"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"} Dec 05 19:37:28 crc kubenswrapper[4982]: I1205 19:37:28.188292 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerStarted","Data":"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"} Dec 05 19:37:28 crc kubenswrapper[4982]: I1205 19:37:28.208516 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p5gbn" podStartSLOduration=2.7030598770000003 podStartE2EDuration="7.20849992s" podCreationTimestamp="2025-12-05 19:37:21 +0000 UTC" firstStartedPulling="2025-12-05 19:37:23.126969958 +0000 UTC m=+1422.008855953" lastFinishedPulling="2025-12-05 19:37:27.632410001 +0000 UTC m=+1426.514295996" observedRunningTime="2025-12-05 19:37:28.205128344 +0000 UTC m=+1427.087014339" watchObservedRunningTime="2025-12-05 19:37:28.20849992 +0000 UTC m=+1427.090385905" Dec 05 19:37:28 crc kubenswrapper[4982]: I1205 19:37:28.554487 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 19:37:28 crc kubenswrapper[4982]: I1205 19:37:28.700331 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 19:37:28 crc kubenswrapper[4982]: I1205 19:37:28.700544 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" containerName="kube-state-metrics" containerID="cri-o://1f137d81ec4e7d502590effaff67e660004627a4336b654d4cff1c954480cb1a" gracePeriod=30 Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.213683 4982 generic.go:334] "Generic (PLEG): container finished" podID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" containerID="1f137d81ec4e7d502590effaff67e660004627a4336b654d4cff1c954480cb1a" exitCode=2 Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.213922 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"700dc7aa-a441-4419-90c8-ff6ec6d31f23","Type":"ContainerDied","Data":"1f137d81ec4e7d502590effaff67e660004627a4336b654d4cff1c954480cb1a"} Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.351599 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-l2zcs"] Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.352928 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.361940 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.362345 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.389046 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-l2zcs"] Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.401301 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.401413 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.401474 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.401502 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br8qt\" (UniqueName: \"kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.476661 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.503822 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.503870 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br8qt\" (UniqueName: \"kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.504042 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.504194 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.514251 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.517161 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.518575 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.539750 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br8qt\" (UniqueName: \"kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt\") pod \"nova-cell0-cell-mapping-l2zcs\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.606507 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2h9b\" (UniqueName: \"kubernetes.io/projected/700dc7aa-a441-4419-90c8-ff6ec6d31f23-kube-api-access-h2h9b\") pod \"700dc7aa-a441-4419-90c8-ff6ec6d31f23\" (UID: \"700dc7aa-a441-4419-90c8-ff6ec6d31f23\") " Dec 05 19:37:29 crc kubenswrapper[4982]: 
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.709112 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2h9b\" (UniqueName: \"kubernetes.io/projected/700dc7aa-a441-4419-90c8-ff6ec6d31f23-kube-api-access-h2h9b\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.713614 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: E1205 19:37:29.714285 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" containerName="kube-state-metrics"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.714310 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" containerName="kube-state-metrics"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.714584 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" containerName="kube-state-metrics"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.715564 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.730915 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.732873 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.733935 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.736751 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.747582 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.770303 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l2zcs"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.771962 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.773983 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.786794 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811375 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811431 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811477 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmccn\" (UniqueName: \"kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811606 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811733 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bmfd\" (UniqueName: \"kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811761 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.811835 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mt7j\" (UniqueName: \"kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.820616 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.837919 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.841746 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.849015 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.894986 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913524 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913617 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913641 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913664 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913729 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913815 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bmfd\" (UniqueName: \"kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0"
pod="openstack/nova-scheduler-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913861 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913879 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913933 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mt7j\" (UniqueName: \"kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.913971 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srdxj\" (UniqueName: \"kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.914010 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.914031 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.914060 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmccn\" (UniqueName: \"kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.914108 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.921612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.926793 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.929924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.926220 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.942623 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.958807 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.973759 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.974902 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bmfd\" (UniqueName: \"kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd\") pod \"nova-scheduler-0\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " pod="openstack/nova-scheduler-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.979584 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmccn\" (UniqueName: \"kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn\") pod \"nova-metadata-0\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " pod="openstack/nova-metadata-0" Dec 05 19:37:29 crc kubenswrapper[4982]: I1205 19:37:29.979884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mt7j\" (UniqueName: \"kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j\") pod \"nova-cell1-novncproxy-0\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.016740 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.016846 4982 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.016877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.016991 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srdxj\" (UniqueName: \"kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.017750 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.027394 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.027537 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.036381 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.051816 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srdxj\" (UniqueName: \"kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.054372 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.060467 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data\") pod \"nova-api-0\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") " pod="openstack/nova-api-0" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.099553 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.101366 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.107427 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.183145 4982 util.go:30] "No sandbox for pod can be found. 
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.208812 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.232073 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.232495 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.249863 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.249927 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.250012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.250166 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gvxgp\" (UniqueName: \"kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.290741 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"700dc7aa-a441-4419-90c8-ff6ec6d31f23","Type":"ContainerDied","Data":"09a48a7c542ae697d46ab1190e9c87f4958128b302da9da5d50b2febb74a597c"}
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.290788 4982 scope.go:117] "RemoveContainer" containerID="1f137d81ec4e7d502590effaff67e660004627a4336b654d4cff1c954480cb1a"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.290965 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.346194 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.352946 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.353025 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.353118 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.353242 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gvxgp\" (UniqueName: \"kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.353279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.353386 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.354998 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.355521 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.356021 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.356378 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.356752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.360711 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.377583 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gvxgp\" (UniqueName: \"kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp\") pod \"dnsmasq-dns-7c9cb78d75-t4n2t\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.384591 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.386076 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.388574 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.393539 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.422950 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.435371 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.455378 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.455438 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.455597 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wxjp\" (UniqueName: \"kubernetes.io/projected/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-api-access-5wxjp\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.455631 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.557251 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.557563 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wxjp\" (UniqueName: \"kubernetes.io/projected/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-api-access-5wxjp\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.557679 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.561832 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.567871 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.568185 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.576659 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.584764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wxjp\" (UniqueName: \"kubernetes.io/projected/babd6ca2-04d1-4f51-aaa6-d621a339d799-kube-api-access-5wxjp\") pod \"kube-state-metrics-0\" (UID: \"babd6ca2-04d1-4f51-aaa6-d621a339d799\") " pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.670884 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.681282 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-l2zcs"]
Dec 05 19:37:30 crc kubenswrapper[4982]: W1205 19:37:30.688540 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podafa9e944_29e0_416e_8c19_f3b9786c8464.slice/crio-fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6 WatchSource:0}: Error finding container fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6: Status 404 returned error can't find the container with id fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.716329 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.748192 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-42bfv"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.749506 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.751533 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.751931 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.758306 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-42bfv"]
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.874884 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57mlz\" (UniqueName: \"kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.875232 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.875293 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.877031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.951239 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:37:30 crc kubenswrapper[4982]: W1205 19:37:30.973031 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14f5ae40_0fa5_4c05_b172_3edd52b0b313.slice/crio-af0d3add732370b996c196b237d21ebf5fcfd724d59691bd4a608bfb6385d88f WatchSource:0}: Error finding container af0d3add732370b996c196b237d21ebf5fcfd724d59691bd4a608bfb6385d88f: Status 404 returned error can't find the container with id af0d3add732370b996c196b237d21ebf5fcfd724d59691bd4a608bfb6385d88f
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.978311 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57mlz\" (UniqueName: \"kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.978345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv"
\"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.978439 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.978523 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.983745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.986832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:30 crc kubenswrapper[4982]: I1205 19:37:30.988080 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.012519 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57mlz\" (UniqueName: \"kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz\") pod \"nova-cell1-conductor-db-sync-42bfv\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.075350 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:31 crc kubenswrapper[4982]: W1205 19:37:31.077364 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod26fd4fe1_5d01_403f_aeb9_1bbb36f008e4.slice/crio-404983df7ccb1d1e408ab8d194a1d61c5a6f6c6dd1104d08f0f2dce94025a241 WatchSource:0}: Error finding container 404983df7ccb1d1e408ab8d194a1d61c5a6f6c6dd1104d08f0f2dce94025a241: Status 404 returned error can't find the container with id 404983df7ccb1d1e408ab8d194a1d61c5a6f6c6dd1104d08f0f2dce94025a241 Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.080978 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.102584 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.272613 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.314377 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" event={"ID":"c78c0950-654a-40c4-8ae4-bf213130fbcf","Type":"ContainerStarted","Data":"9a80bae82d193ced15a59edfc36c1531773341018384210173573f97f63dd51b"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.316481 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerStarted","Data":"404983df7ccb1d1e408ab8d194a1d61c5a6f6c6dd1104d08f0f2dce94025a241"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.325643 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l2zcs" event={"ID":"afa9e944-29e0-416e-8c19-f3b9786c8464","Type":"ContainerStarted","Data":"8b1dcee64e05cb676a934e0168f923dfd361a873491f0b05eda5d10f9b9c0fd4"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.325692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l2zcs" event={"ID":"afa9e944-29e0-416e-8c19-f3b9786c8464","Type":"ContainerStarted","Data":"fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.329346 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"268d0151-148e-46e8-9dbe-4227cdd3d0b3","Type":"ContainerStarted","Data":"214488a7c39f40b7514ea11645be14dd3941b12570c713898b09f1ff96a761cc"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.345132 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"14f5ae40-0fa5-4c05-b172-3edd52b0b313","Type":"ContainerStarted","Data":"af0d3add732370b996c196b237d21ebf5fcfd724d59691bd4a608bfb6385d88f"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.346682 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-l2zcs" podStartSLOduration=2.346644242 podStartE2EDuration="2.346644242s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:31.340112377 +0000 UTC m=+1430.221998372" watchObservedRunningTime="2025-12-05 19:37:31.346644242 +0000 UTC m=+1430.228530247" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.352075 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerStarted","Data":"f451e2bf1e01e88bc818c859b8ca8c39ed22450cff1d1a5ce6943450fd3c37e5"} Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.385906 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.438583 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="700dc7aa-a441-4419-90c8-ff6ec6d31f23" path="/var/lib/kubelet/pods/700dc7aa-a441-4419-90c8-ff6ec6d31f23/volumes" Dec 05 19:37:31 crc 
kubenswrapper[4982]: I1205 19:37:31.439469 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.439494 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p5gbn" Dec 05 19:37:31 crc kubenswrapper[4982]: I1205 19:37:31.726661 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-42bfv"] Dec 05 19:37:31 crc kubenswrapper[4982]: W1205 19:37:31.789560 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1fb8006_cfc6_412d_90be_9bb828949621.slice/crio-cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017 WatchSource:0}: Error finding container cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017: Status 404 returned error can't find the container with id cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017 Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.382429 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"babd6ca2-04d1-4f51-aaa6-d621a339d799","Type":"ContainerStarted","Data":"d9bac3e5f2d1b22d0e329c78e366826b7aac621ec4c1b3a60a1400f94219ebe3"} Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.382804 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"babd6ca2-04d1-4f51-aaa6-d621a339d799","Type":"ContainerStarted","Data":"2137264af67483c80d4f3cd495c4c2397daed81b440588461706cb79a0baad63"} Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.383259 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.393413 4982 generic.go:334] "Generic (PLEG): container finished" podID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerID="4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865" exitCode=0 Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.393514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" event={"ID":"c78c0950-654a-40c4-8ae4-bf213130fbcf","Type":"ContainerDied","Data":"4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865"} Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.400706 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-42bfv" event={"ID":"b1fb8006-cfc6-412d-90be-9bb828949621","Type":"ContainerStarted","Data":"699747e4c64acab7ad2c54377703b1769ead6d2cdd15a0ac89736e29c1fb8aca"} Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.400754 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-42bfv" event={"ID":"b1fb8006-cfc6-412d-90be-9bb828949621","Type":"ContainerStarted","Data":"cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017"} Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.451309 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.009377104 podStartE2EDuration="2.451293467s" podCreationTimestamp="2025-12-05 19:37:30 +0000 UTC" firstStartedPulling="2025-12-05 19:37:31.459363655 +0000 UTC m=+1430.341249650" lastFinishedPulling="2025-12-05 19:37:31.901280018 +0000 UTC m=+1430.783166013" observedRunningTime="2025-12-05 
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.525723 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-42bfv" podStartSLOduration=2.52569853 podStartE2EDuration="2.52569853s" podCreationTimestamp="2025-12-05 19:37:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:32.472087973 +0000 UTC m=+1431.353973968" watchObservedRunningTime="2025-12-05 19:37:32.52569853 +0000 UTC m=+1431.407584525"
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.562434 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p5gbn" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server" probeResult="failure" output=<
Dec 05 19:37:32 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s
Dec 05 19:37:32 crc kubenswrapper[4982]: >
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.896907 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.898060 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-central-agent" containerID="cri-o://5d43d5c4f0b661a529792083f6092739009e4251e48eaf34bf2620faf1ad3415" gracePeriod=30
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.898124 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="proxy-httpd" containerID="cri-o://4de546e0e7c9b8887cab9701ed9d3fcc84a83edc41d9821a8770a520a67cc4bd" gracePeriod=30
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.898206 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-notification-agent" containerID="cri-o://d7e847b92efc445148f09b89bcadd95565514df5a01a9ee6627c70c144e95420" gracePeriod=30
Dec 05 19:37:32 crc kubenswrapper[4982]: I1205 19:37:32.898214 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="sg-core" containerID="cri-o://c7f8a8449c824c9c191ea4026acd2699c676cbc5e4f257d0151c7e77a1a60938" gracePeriod=30
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.431860 4982 generic.go:334] "Generic (PLEG): container finished" podID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerID="4de546e0e7c9b8887cab9701ed9d3fcc84a83edc41d9821a8770a520a67cc4bd" exitCode=0
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.432238 4982 generic.go:334] "Generic (PLEG): container finished" podID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerID="c7f8a8449c824c9c191ea4026acd2699c676cbc5e4f257d0151c7e77a1a60938" exitCode=2
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.432256 4982 generic.go:334] "Generic (PLEG): container finished" podID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerID="5d43d5c4f0b661a529792083f6092739009e4251e48eaf34bf2620faf1ad3415" exitCode=0
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.431944 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerDied","Data":"4de546e0e7c9b8887cab9701ed9d3fcc84a83edc41d9821a8770a520a67cc4bd"}
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.432338 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerDied","Data":"c7f8a8449c824c9c191ea4026acd2699c676cbc5e4f257d0151c7e77a1a60938"}
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.432354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerDied","Data":"5d43d5c4f0b661a529792083f6092739009e4251e48eaf34bf2620faf1ad3415"}
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.436990 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" event={"ID":"c78c0950-654a-40c4-8ae4-bf213130fbcf","Type":"ContainerStarted","Data":"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6"}
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.466624 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" podStartSLOduration=4.46660893 podStartE2EDuration="4.46660893s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:33.45948484 +0000 UTC m=+1432.341370835" watchObservedRunningTime="2025-12-05 19:37:33.46660893 +0000 UTC m=+1432.348494925"
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.689852 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:37:33 crc kubenswrapper[4982]: I1205 19:37:33.701529 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:37:34 crc kubenswrapper[4982]: I1205 19:37:34.451003 4982 generic.go:334] "Generic (PLEG): container finished" podID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerID="d7e847b92efc445148f09b89bcadd95565514df5a01a9ee6627c70c144e95420" exitCode=0
Dec 05 19:37:34 crc kubenswrapper[4982]: I1205 19:37:34.451093 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerDied","Data":"d7e847b92efc445148f09b89bcadd95565514df5a01a9ee6627c70c144e95420"}
Dec 05 19:37:34 crc kubenswrapper[4982]: I1205 19:37:34.451314 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t"
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.427405 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.499443 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4d15752-5cc7-4fd1-a682-514b6353f725","Type":"ContainerDied","Data":"f975babd9156d2d13a2b8434a063a698cbc7552f73c36a4f66b88f555a6a47e2"}
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.499508 4982 scope.go:117] "RemoveContainer" containerID="4de546e0e7c9b8887cab9701ed9d3fcc84a83edc41d9821a8770a520a67cc4bd"
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.499925 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.537949 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.538097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.538765 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544170 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544253 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fjt6s\" (UniqueName: \"kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544287 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544292 4982 scope.go:117] "RemoveContainer" containerID="c7f8a8449c824c9c191ea4026acd2699c676cbc5e4f257d0151c7e77a1a60938"
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544315 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.544442 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd\") pod \"b4d15752-5cc7-4fd1-a682-514b6353f725\" (UID: \"b4d15752-5cc7-4fd1-a682-514b6353f725\") "
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.547239 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.549317 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume
"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.554257 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts" (OuterVolumeSpecName: "scripts") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.554611 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s" (OuterVolumeSpecName: "kube-api-access-fjt6s") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "kube-api-access-fjt6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.598052 4982 scope.go:117] "RemoveContainer" containerID="d7e847b92efc445148f09b89bcadd95565514df5a01a9ee6627c70c144e95420" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.628926 4982 scope.go:117] "RemoveContainer" containerID="5d43d5c4f0b661a529792083f6092739009e4251e48eaf34bf2620faf1ad3415" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.649911 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.649943 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fjt6s\" (UniqueName: \"kubernetes.io/projected/b4d15752-5cc7-4fd1-a682-514b6353f725-kube-api-access-fjt6s\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.649955 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4d15752-5cc7-4fd1-a682-514b6353f725-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.693891 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.752715 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.829805 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.855069 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.870125 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data" (OuterVolumeSpecName: "config-data") pod "b4d15752-5cc7-4fd1-a682-514b6353f725" (UID: "b4d15752-5cc7-4fd1-a682-514b6353f725"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:35 crc kubenswrapper[4982]: I1205 19:37:35.957393 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4d15752-5cc7-4fd1-a682-514b6353f725-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.141801 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.158179 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.175775 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:37:36 crc kubenswrapper[4982]: E1205 19:37:36.176254 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="proxy-httpd" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176270 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="proxy-httpd" Dec 05 19:37:36 crc kubenswrapper[4982]: E1205 19:37:36.176280 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-notification-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176288 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-notification-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: E1205 19:37:36.176303 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="sg-core" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176309 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="sg-core" Dec 05 19:37:36 crc kubenswrapper[4982]: E1205 19:37:36.176334 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-central-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176339 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-central-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176529 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-notification-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176547 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="ceilometer-central-agent" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176560 
4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="sg-core" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.176575 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" containerName="proxy-httpd" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.178537 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.190035 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.190106 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.190319 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.191218 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.263826 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b27zq\" (UniqueName: \"kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.263965 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264071 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264115 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264141 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264304 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264328 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.264425 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366241 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b27zq\" (UniqueName: \"kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366361 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366434 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366457 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366477 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366516 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.366531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.367060 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.367080 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.370669 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.370714 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.371664 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.371667 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.384193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.388169 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b27zq\" (UniqueName: \"kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq\") pod \"ceilometer-0\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.504893 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.512805 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerStarted","Data":"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.512847 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerStarted","Data":"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.515518 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerStarted","Data":"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.515552 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerStarted","Data":"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.517468 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"268d0151-148e-46e8-9dbe-4227cdd3d0b3","Type":"ContainerStarted","Data":"c62a039f3dad185547c9010f5824e0196f4c0cc1d25f2b44148724aa74a2d6e3"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.520438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"14f5ae40-0fa5-4c05-b172-3edd52b0b313","Type":"ContainerStarted","Data":"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"} Dec 05 19:37:36 crc kubenswrapper[4982]: I1205 19:37:36.545866 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.417010629 podStartE2EDuration="7.545849642s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="2025-12-05 19:37:30.678236478 +0000 UTC m=+1429.560122473" lastFinishedPulling="2025-12-05 19:37:34.807075491 +0000 UTC m=+1433.688961486" observedRunningTime="2025-12-05 19:37:36.542849757 +0000 UTC m=+1435.424735772" watchObservedRunningTime="2025-12-05 19:37:36.545849642 +0000 UTC m=+1435.427735627" Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.030229 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.429186 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4d15752-5cc7-4fd1-a682-514b6353f725" path="/var/lib/kubelet/pods/b4d15752-5cc7-4fd1-a682-514b6353f725/volumes" Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.530347 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerStarted","Data":"15b028646d6afe6b21beddc617a089bf2109e0c5ff812f3a615504422858c3b3"} Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.530515 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-log" containerID="cri-o://129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" gracePeriod=30 Dec 05 19:37:37 crc 
kubenswrapper[4982]: I1205 19:37:37.530594 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-metadata" containerID="cri-o://2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" gracePeriod=30 Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.530632 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1" gracePeriod=30 Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.561336 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=4.683308264 podStartE2EDuration="8.5613162s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="2025-12-05 19:37:30.977783078 +0000 UTC m=+1429.859669073" lastFinishedPulling="2025-12-05 19:37:34.855791014 +0000 UTC m=+1433.737677009" observedRunningTime="2025-12-05 19:37:37.553239125 +0000 UTC m=+1436.435125140" watchObservedRunningTime="2025-12-05 19:37:37.5613162 +0000 UTC m=+1436.443202195" Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.575112 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.8151664400000005 podStartE2EDuration="8.575089498s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="2025-12-05 19:37:31.112585289 +0000 UTC m=+1429.994471284" lastFinishedPulling="2025-12-05 19:37:34.872508347 +0000 UTC m=+1433.754394342" observedRunningTime="2025-12-05 19:37:37.569306982 +0000 UTC m=+1436.451192987" watchObservedRunningTime="2025-12-05 19:37:37.575089498 +0000 UTC m=+1436.456975503" Dec 05 19:37:37 crc kubenswrapper[4982]: I1205 19:37:37.599547 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.835687421 podStartE2EDuration="8.599528257s" podCreationTimestamp="2025-12-05 19:37:29 +0000 UTC" firstStartedPulling="2025-12-05 19:37:31.112523058 +0000 UTC m=+1429.994409053" lastFinishedPulling="2025-12-05 19:37:34.876363894 +0000 UTC m=+1433.758249889" observedRunningTime="2025-12-05 19:37:37.589557495 +0000 UTC m=+1436.471443490" watchObservedRunningTime="2025-12-05 19:37:37.599528257 +0000 UTC m=+1436.481414252" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.245999 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.326519 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle\") pod \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.326626 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs\") pod \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.326769 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmccn\" (UniqueName: \"kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn\") pod \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.326858 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data\") pod \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\" (UID: \"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4\") " Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.327035 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs" (OuterVolumeSpecName: "logs") pod "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" (UID: "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.327339 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.332286 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn" (OuterVolumeSpecName: "kube-api-access-lmccn") pod "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" (UID: "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4"). InnerVolumeSpecName "kube-api-access-lmccn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.362950 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data" (OuterVolumeSpecName: "config-data") pod "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" (UID: "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.363416 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" (UID: "26fd4fe1-5d01-403f-aeb9-1bbb36f008e4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.429740 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmccn\" (UniqueName: \"kubernetes.io/projected/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-kube-api-access-lmccn\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.429776 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.429787 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.545427 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerStarted","Data":"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb"} Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547511 4982 generic.go:334] "Generic (PLEG): container finished" podID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerID="2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" exitCode=0 Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547550 4982 generic.go:334] "Generic (PLEG): container finished" podID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerID="129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" exitCode=143 Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547582 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerDied","Data":"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4"} Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547622 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerDied","Data":"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b"} Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547643 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"26fd4fe1-5d01-403f-aeb9-1bbb36f008e4","Type":"ContainerDied","Data":"404983df7ccb1d1e408ab8d194a1d61c5a6f6c6dd1104d08f0f2dce94025a241"} Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547670 4982 scope.go:117] "RemoveContainer" containerID="2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.547851 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.586363 4982 scope.go:117] "RemoveContainer" containerID="129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.630168 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.646873 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.665757 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:38 crc kubenswrapper[4982]: E1205 19:37:38.666245 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-log" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.666260 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-log" Dec 05 19:37:38 crc kubenswrapper[4982]: E1205 19:37:38.666298 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-metadata" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.666305 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-metadata" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.666498 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-metadata" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.666513 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" containerName="nova-metadata-log" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.667597 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.669878 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.670327 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.693941 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.711363 4982 scope.go:117] "RemoveContainer" containerID="2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" Dec 05 19:37:38 crc kubenswrapper[4982]: E1205 19:37:38.711829 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4\": container with ID starting with 2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4 not found: ID does not exist" containerID="2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.711865 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4"} err="failed to get container status \"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4\": rpc error: code = NotFound desc = could not find container \"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4\": container with ID starting with 2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4 not found: ID does not exist" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.711892 4982 scope.go:117] "RemoveContainer" containerID="129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" Dec 05 19:37:38 crc kubenswrapper[4982]: E1205 19:37:38.712131 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b\": container with ID starting with 129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b not found: ID does not exist" containerID="129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.712165 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b"} err="failed to get container status \"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b\": rpc error: code = NotFound desc = could not find container \"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b\": container with ID starting with 129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b not found: ID does not exist" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.712181 4982 scope.go:117] "RemoveContainer" containerID="2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.712355 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4"} err="failed to get container status \"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4\": rpc error: 
code = NotFound desc = could not find container \"2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4\": container with ID starting with 2067ef6c0a70e8b7e691928613be962a062bfbaf62fa0e1ef9181fb26ad782f4 not found: ID does not exist" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.712368 4982 scope.go:117] "RemoveContainer" containerID="129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.712523 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b"} err="failed to get container status \"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b\": rpc error: code = NotFound desc = could not find container \"129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b\": container with ID starting with 129e2d649d8046097d526ac54aa323fde57dbc9302adf822d4e6f38bc5aa057b not found: ID does not exist" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.735397 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.735450 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.735473 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.735550 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.735636 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8rbcn\" (UniqueName: \"kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.837486 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.837818 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8rbcn\" (UniqueName: \"kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn\") pod 
\"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.837945 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.837953 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.838045 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.838075 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.843764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.844884 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.847928 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:38 crc kubenswrapper[4982]: I1205 19:37:38.858691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8rbcn\" (UniqueName: \"kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn\") pod \"nova-metadata-0\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " pod="openstack/nova-metadata-0" Dec 05 19:37:39 crc kubenswrapper[4982]: I1205 19:37:39.006967 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:39 crc kubenswrapper[4982]: I1205 19:37:39.406704 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26fd4fe1-5d01-403f-aeb9-1bbb36f008e4" path="/var/lib/kubelet/pods/26fd4fe1-5d01-403f-aeb9-1bbb36f008e4/volumes" Dec 05 19:37:39 crc kubenswrapper[4982]: W1205 19:37:39.484738 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod44691fd6_915b_4927_b094_50b544fd638e.slice/crio-ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7 WatchSource:0}: Error finding container ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7: Status 404 returned error can't find the container with id ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7 Dec 05 19:37:39 crc kubenswrapper[4982]: I1205 19:37:39.485114 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:39 crc kubenswrapper[4982]: I1205 19:37:39.574966 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerStarted","Data":"ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7"} Dec 05 19:37:39 crc kubenswrapper[4982]: I1205 19:37:39.587990 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerStarted","Data":"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.037491 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.055698 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.055748 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.105043 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.210268 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.210325 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.437542 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.503748 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"] Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.503984 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="dnsmasq-dns" containerID="cri-o://0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419" gracePeriod=10 Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.633981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerStarted","Data":"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.643885 4982 generic.go:334] "Generic (PLEG): container finished" podID="b1fb8006-cfc6-412d-90be-9bb828949621" containerID="699747e4c64acab7ad2c54377703b1769ead6d2cdd15a0ac89736e29c1fb8aca" exitCode=0 Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.643964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-42bfv" event={"ID":"b1fb8006-cfc6-412d-90be-9bb828949621","Type":"ContainerDied","Data":"699747e4c64acab7ad2c54377703b1769ead6d2cdd15a0ac89736e29c1fb8aca"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.686843 4982 generic.go:334] "Generic (PLEG): container finished" podID="afa9e944-29e0-416e-8c19-f3b9786c8464" containerID="8b1dcee64e05cb676a934e0168f923dfd361a873491f0b05eda5d10f9b9c0fd4" exitCode=0 Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.686967 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l2zcs" event={"ID":"afa9e944-29e0-416e-8c19-f3b9786c8464","Type":"ContainerDied","Data":"8b1dcee64e05cb676a934e0168f923dfd361a873491f0b05eda5d10f9b9c0fd4"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.697758 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerStarted","Data":"0f4346dfca4538237293c8f1ecb509ae836d07b88fb6c3a3919caded08e79ff4"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.697821 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerStarted","Data":"2d25f9fe744dcb293634dbca72e4518e44069adf613bf28e627212a8945c9865"} Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.735238 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.735202537 podStartE2EDuration="2.735202537s" podCreationTimestamp="2025-12-05 19:37:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:40.727695247 +0000 UTC m=+1439.609581252" watchObservedRunningTime="2025-12-05 19:37:40.735202537 +0000 UTC m=+1439.617088532" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.738662 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 19:37:40 crc kubenswrapper[4982]: I1205 19:37:40.782997 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.178253 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.294436 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.294573 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.303896 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.304031 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.304169 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.304268 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.304315 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcblx\" (UniqueName: \"kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.304384 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.309676 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx" (OuterVolumeSpecName: "kube-api-access-qcblx") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "kube-api-access-qcblx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.406750 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcblx\" (UniqueName: \"kubernetes.io/projected/ba94fb6b-e425-465d-a56d-227a4f96c75a-kube-api-access-qcblx\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.416996 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.432909 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config" (OuterVolumeSpecName: "config") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.456258 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.490378 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.508680 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.509153 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") pod \"ba94fb6b-e425-465d-a56d-227a4f96c75a\" (UID: \"ba94fb6b-e425-465d-a56d-227a4f96c75a\") " Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.509961 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.511613 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.511708 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.511777 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: W1205 19:37:41.509306 4982 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/ba94fb6b-e425-465d-a56d-227a4f96c75a/volumes/kubernetes.io~configmap/dns-svc Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.511898 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ba94fb6b-e425-465d-a56d-227a4f96c75a" (UID: "ba94fb6b-e425-465d-a56d-227a4f96c75a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.614611 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ba94fb6b-e425-465d-a56d-227a4f96c75a-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.709597 4982 generic.go:334] "Generic (PLEG): container finished" podID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerID="0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419" exitCode=0 Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.709692 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" event={"ID":"ba94fb6b-e425-465d-a56d-227a4f96c75a","Type":"ContainerDied","Data":"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419"} Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.709723 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.709747 4982 scope.go:117] "RemoveContainer" containerID="0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.709731 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-z4jwg" event={"ID":"ba94fb6b-e425-465d-a56d-227a4f96c75a","Type":"ContainerDied","Data":"ac68896e67bfcbdb6adae52641e11f33d0984a85384c5242b860573bed6173d7"} Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.713814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerStarted","Data":"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a"} Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.714275 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.759908 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.042369513 podStartE2EDuration="5.759886587s" podCreationTimestamp="2025-12-05 19:37:36 +0000 UTC" firstStartedPulling="2025-12-05 19:37:37.047245081 +0000 UTC m=+1435.929131076" lastFinishedPulling="2025-12-05 19:37:40.764762155 +0000 UTC m=+1439.646648150" observedRunningTime="2025-12-05 19:37:41.744399325 +0000 UTC m=+1440.626285330" watchObservedRunningTime="2025-12-05 19:37:41.759886587 +0000 UTC m=+1440.641772582" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.770004 4982 scope.go:117] "RemoveContainer" containerID="ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.775454 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"] Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.785120 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-z4jwg"] Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.812434 4982 scope.go:117] "RemoveContainer" containerID="0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419" Dec 05 19:37:41 crc kubenswrapper[4982]: E1205 19:37:41.812992 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419\": container with ID starting with 0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419 not found: ID does not exist" containerID="0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.813042 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419"} err="failed to get container status \"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419\": rpc error: code = NotFound desc = could not find container \"0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419\": container with ID starting with 0e089a87a2c16f5830528dad96b76ff95584ac167e011ef2fb8c6f817f0c0419 not found: ID does not exist" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.813075 4982 scope.go:117] "RemoveContainer" containerID="ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c" 
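
[Editor's note] The pod_startup_latency_tracker line for ceilometer-0 above is internally consistent: the reported E2E duration equals watchObservedRunningTime minus podCreationTimestamp, and the SLO duration equals that E2E duration minus the image-pull window (lastFinishedPulling minus firstStartedPulling). A minimal stdlib-only Go sketch re-deriving those numbers from the timestamps copied out of the log (the subtraction-of-pull-time interpretation is inferred from the values, not from kubelet source):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the log's timestamp format, e.g.
	// "2025-12-05 19:37:36 +0000 UTC"; fractional seconds are optional.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	parse := func(s string) time.Time {
		t, err := time.Parse(layout, s)
		if err != nil {
			panic(err)
		}
		return t
	}

	// Values copied verbatim from the ceilometer-0 tracker line above.
	created := parse("2025-12-05 19:37:36 +0000 UTC")          // podCreationTimestamp
	observed := parse("2025-12-05 19:37:41.759886587 +0000 UTC") // watchObservedRunningTime
	pullStart := parse("2025-12-05 19:37:37.047245081 +0000 UTC") // firstStartedPulling
	pullEnd := parse("2025-12-05 19:37:40.764762155 +0000 UTC")   // lastFinishedPulling

	e2e := observed.Sub(created)        // 5.759886587s = podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // 2.042369513s = podStartSLOduration
	fmt.Println(e2e, slo)
}
```

For pods like nova-metadata-0 earlier in the log, both pull timestamps are the zero value ("0001-01-01 00:00:00 +0000 UTC"), so no pull window is subtracted and podStartSLOduration equals podStartE2EDuration, which matches the identical 2.735202537s values reported there.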
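[Editor's note] The "ContainerStatus from runtime service failed ... NotFound" errors surrounding the dnsmasq-dns container removal are benign: by the time the kubelet re-queries the runtime, the container is already gone, the CRI call returns gRPC NotFound, and cleanup proceeds (the errors are logged and the pod's volumes dir is later cleaned up). A hedged sketch of that tolerate-NotFound pattern — a hypothetical helper for illustration, not the kubelet's actual implementation; the removeContainer/missing names are invented:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer deletes a container via a CRI-style call and treats a
// gRPC NotFound response as success: an already-missing container is the
// desired end state of a delete, so it is not surfaced as a failure.
func removeContainer(id string, remove func(string) error) error {
	err := remove(id)
	if err == nil {
		return nil
	}
	if s, ok := status.FromError(err); ok && s.Code() == codes.NotFound {
		// Mirrors the "could not find container ... ID does not exist"
		// records above: note it and move on.
		fmt.Printf("container %s already gone, ignoring NotFound\n", id)
		return nil
	}
	return fmt.Errorf("remove container %s: %w", id, err)
}

func main() {
	// Simulated runtime that always reports the container as missing.
	missing := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	if err := removeContainer("0e089a87a2c1", missing); err != nil {
		panic(err)
	}
}
```

This is why the same container ID appears in an error record and then never again: the NotFound branch is the normal exit for a delete that raced with the runtime's own garbage collection.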
Dec 05 19:37:41 crc kubenswrapper[4982]: E1205 19:37:41.816537 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c\": container with ID starting with ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c not found: ID does not exist" containerID="ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c" Dec 05 19:37:41 crc kubenswrapper[4982]: I1205 19:37:41.816581 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c"} err="failed to get container status \"ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c\": rpc error: code = NotFound desc = could not find container \"ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c\": container with ID starting with ec5f9e8385792e6cd70589a950c671e20b7e072f6474788a5f20f5870e53896c not found: ID does not exist" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.291107 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.423851 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.462755 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") pod \"b1fb8006-cfc6-412d-90be-9bb828949621\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.462816 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-57mlz\" (UniqueName: \"kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz\") pod \"b1fb8006-cfc6-412d-90be-9bb828949621\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.462921 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle\") pod \"b1fb8006-cfc6-412d-90be-9bb828949621\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.463016 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data\") pod \"b1fb8006-cfc6-412d-90be-9bb828949621\" (UID: \"b1fb8006-cfc6-412d-90be-9bb828949621\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.474079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts" (OuterVolumeSpecName: "scripts") pod "b1fb8006-cfc6-412d-90be-9bb828949621" (UID: "b1fb8006-cfc6-412d-90be-9bb828949621"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.479856 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz" (OuterVolumeSpecName: "kube-api-access-57mlz") pod "b1fb8006-cfc6-412d-90be-9bb828949621" (UID: "b1fb8006-cfc6-412d-90be-9bb828949621"). InnerVolumeSpecName "kube-api-access-57mlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.481719 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p5gbn" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server" probeResult="failure" output=< Dec 05 19:37:42 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:37:42 crc kubenswrapper[4982]: > Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.506376 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data" (OuterVolumeSpecName: "config-data") pod "b1fb8006-cfc6-412d-90be-9bb828949621" (UID: "b1fb8006-cfc6-412d-90be-9bb828949621"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.519730 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1fb8006-cfc6-412d-90be-9bb828949621" (UID: "b1fb8006-cfc6-412d-90be-9bb828949621"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.559265 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.559316 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.559381 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.560190 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.560237 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" 
containerID="cri-o://d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef" gracePeriod=600 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.565926 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br8qt\" (UniqueName: \"kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt\") pod \"afa9e944-29e0-416e-8c19-f3b9786c8464\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566049 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts\") pod \"afa9e944-29e0-416e-8c19-f3b9786c8464\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566222 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data\") pod \"afa9e944-29e0-416e-8c19-f3b9786c8464\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566321 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle\") pod \"afa9e944-29e0-416e-8c19-f3b9786c8464\" (UID: \"afa9e944-29e0-416e-8c19-f3b9786c8464\") " Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566796 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566807 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-57mlz\" (UniqueName: \"kubernetes.io/projected/b1fb8006-cfc6-412d-90be-9bb828949621-kube-api-access-57mlz\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566817 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.566827 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1fb8006-cfc6-412d-90be-9bb828949621-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.571323 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts" (OuterVolumeSpecName: "scripts") pod "afa9e944-29e0-416e-8c19-f3b9786c8464" (UID: "afa9e944-29e0-416e-8c19-f3b9786c8464"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.573310 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt" (OuterVolumeSpecName: "kube-api-access-br8qt") pod "afa9e944-29e0-416e-8c19-f3b9786c8464" (UID: "afa9e944-29e0-416e-8c19-f3b9786c8464"). InnerVolumeSpecName "kube-api-access-br8qt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.605081 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data" (OuterVolumeSpecName: "config-data") pod "afa9e944-29e0-416e-8c19-f3b9786c8464" (UID: "afa9e944-29e0-416e-8c19-f3b9786c8464"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.607414 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "afa9e944-29e0-416e-8c19-f3b9786c8464" (UID: "afa9e944-29e0-416e-8c19-f3b9786c8464"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.670762 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.670798 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br8qt\" (UniqueName: \"kubernetes.io/projected/afa9e944-29e0-416e-8c19-f3b9786c8464-kube-api-access-br8qt\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.670807 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.670815 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/afa9e944-29e0-416e-8c19-f3b9786c8464-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.745606 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 19:37:42 crc kubenswrapper[4982]: E1205 19:37:42.746315 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afa9e944-29e0-416e-8c19-f3b9786c8464" containerName="nova-manage" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746338 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="afa9e944-29e0-416e-8c19-f3b9786c8464" containerName="nova-manage" Dec 05 19:37:42 crc kubenswrapper[4982]: E1205 19:37:42.746353 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1fb8006-cfc6-412d-90be-9bb828949621" containerName="nova-cell1-conductor-db-sync" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746359 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1fb8006-cfc6-412d-90be-9bb828949621" containerName="nova-cell1-conductor-db-sync" Dec 05 19:37:42 crc kubenswrapper[4982]: E1205 19:37:42.746366 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="init" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746372 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="init" Dec 05 19:37:42 crc kubenswrapper[4982]: E1205 19:37:42.746382 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="dnsmasq-dns" Dec 05 19:37:42 
crc kubenswrapper[4982]: I1205 19:37:42.746388 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="dnsmasq-dns" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746563 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1fb8006-cfc6-412d-90be-9bb828949621" containerName="nova-cell1-conductor-db-sync" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746586 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="afa9e944-29e0-416e-8c19-f3b9786c8464" containerName="nova-manage" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.746596 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" containerName="dnsmasq-dns" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.747351 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.756895 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef" exitCode=0 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.756959 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef"} Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.756992 4982 scope.go:117] "RemoveContainer" containerID="1a1fd81965ac1ad943b31d17af30468278dd74e344d34225c855144a8dd5abed" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.761597 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-l2zcs" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.761658 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-l2zcs" event={"ID":"afa9e944-29e0-416e-8c19-f3b9786c8464","Type":"ContainerDied","Data":"fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6"} Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.761694 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb04b6c8a146b5611bfbc20556c2b59726f25800d204e83132ec76da7c5829c6" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.774067 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-42bfv" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.777432 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-42bfv" event={"ID":"b1fb8006-cfc6-412d-90be-9bb828949621","Type":"ContainerDied","Data":"cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017"} Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.777532 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.812423 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.875653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.875695 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8bt6s\" (UniqueName: \"kubernetes.io/projected/b1112e38-5f73-4a31-8f6d-9b03a9148c02-kube-api-access-8bt6s\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.875720 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.909607 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.909840 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-log" containerID="cri-o://5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c" gracePeriod=30 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.910267 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-api" containerID="cri-o://348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6" gracePeriod=30 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.920581 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.920803 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" containerName="nova-scheduler-scheduler" containerID="cri-o://c62a039f3dad185547c9010f5824e0196f4c0cc1d25f2b44148724aa74a2d6e3" gracePeriod=30 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.941659 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.941959 4982 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-log" containerID="cri-o://2d25f9fe744dcb293634dbca72e4518e44069adf613bf28e627212a8945c9865" gracePeriod=30 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.942221 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-metadata" containerID="cri-o://0f4346dfca4538237293c8f1ecb509ae836d07b88fb6c3a3919caded08e79ff4" gracePeriod=30 Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.982536 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.982589 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8bt6s\" (UniqueName: \"kubernetes.io/projected/b1112e38-5f73-4a31-8f6d-9b03a9148c02-kube-api-access-8bt6s\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.982614 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.987598 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.991677 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1112e38-5f73-4a31-8f6d-9b03a9148c02-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:42 crc kubenswrapper[4982]: I1205 19:37:42.999589 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8bt6s\" (UniqueName: \"kubernetes.io/projected/b1112e38-5f73-4a31-8f6d-9b03a9148c02-kube-api-access-8bt6s\") pod \"nova-cell1-conductor-0\" (UID: \"b1112e38-5f73-4a31-8f6d-9b03a9148c02\") " pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.066950 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.405882 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba94fb6b-e425-465d-a56d-227a4f96c75a" path="/var/lib/kubelet/pods/ba94fb6b-e425-465d-a56d-227a4f96c75a/volumes" Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.587644 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810587 4982 generic.go:334] "Generic (PLEG): container finished" podID="44691fd6-915b-4927-b094-50b544fd638e" containerID="0f4346dfca4538237293c8f1ecb509ae836d07b88fb6c3a3919caded08e79ff4" exitCode=0 Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810616 4982 generic.go:334] "Generic (PLEG): container finished" podID="44691fd6-915b-4927-b094-50b544fd638e" containerID="2d25f9fe744dcb293634dbca72e4518e44069adf613bf28e627212a8945c9865" exitCode=143 Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810688 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerDied","Data":"0f4346dfca4538237293c8f1ecb509ae836d07b88fb6c3a3919caded08e79ff4"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810714 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerDied","Data":"2d25f9fe744dcb293634dbca72e4518e44069adf613bf28e627212a8945c9865"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810726 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"44691fd6-915b-4927-b094-50b544fd638e","Type":"ContainerDied","Data":"ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.810734 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca2029e78163012264150e7be9793c6c092af1afec8b95b9d3715132af379ab7" Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.812558 4982 generic.go:334] "Generic (PLEG): container finished" podID="58815d17-89de-431e-89ac-4e344ca88e9e" containerID="5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c" exitCode=143 Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.812611 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerDied","Data":"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.820234 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.825087 4982 generic.go:334] "Generic (PLEG): container finished" podID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" containerID="c62a039f3dad185547c9010f5824e0196f4c0cc1d25f2b44148724aa74a2d6e3" exitCode=0 Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.825197 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"268d0151-148e-46e8-9dbe-4227cdd3d0b3","Type":"ContainerDied","Data":"c62a039f3dad185547c9010f5824e0196f4c0cc1d25f2b44148724aa74a2d6e3"} Dec 05 
19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.833306 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b1112e38-5f73-4a31-8f6d-9b03a9148c02","Type":"ContainerStarted","Data":"5ae6c011a551175c1648df906d66d0509125c25163a94aa8af7c5762346c0d9f"} Dec 05 19:37:43 crc kubenswrapper[4982]: I1205 19:37:43.893833 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.007176 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs\") pod \"44691fd6-915b-4927-b094-50b544fd638e\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.007365 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8rbcn\" (UniqueName: \"kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn\") pod \"44691fd6-915b-4927-b094-50b544fd638e\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.007589 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs\") pod \"44691fd6-915b-4927-b094-50b544fd638e\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.007631 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data\") pod \"44691fd6-915b-4927-b094-50b544fd638e\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.007680 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle\") pod \"44691fd6-915b-4927-b094-50b544fd638e\" (UID: \"44691fd6-915b-4927-b094-50b544fd638e\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.010222 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs" (OuterVolumeSpecName: "logs") pod "44691fd6-915b-4927-b094-50b544fd638e" (UID: "44691fd6-915b-4927-b094-50b544fd638e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.029076 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn" (OuterVolumeSpecName: "kube-api-access-8rbcn") pod "44691fd6-915b-4927-b094-50b544fd638e" (UID: "44691fd6-915b-4927-b094-50b544fd638e"). InnerVolumeSpecName "kube-api-access-8rbcn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.051376 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "44691fd6-915b-4927-b094-50b544fd638e" (UID: "44691fd6-915b-4927-b094-50b544fd638e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.052540 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data" (OuterVolumeSpecName: "config-data") pod "44691fd6-915b-4927-b094-50b544fd638e" (UID: "44691fd6-915b-4927-b094-50b544fd638e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.095072 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.110371 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8rbcn\" (UniqueName: \"kubernetes.io/projected/44691fd6-915b-4927-b094-50b544fd638e-kube-api-access-8rbcn\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.110400 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/44691fd6-915b-4927-b094-50b544fd638e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.110409 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.110417 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.110644 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "44691fd6-915b-4927-b094-50b544fd638e" (UID: "44691fd6-915b-4927-b094-50b544fd638e"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.211462 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle\") pod \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.211519 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data\") pod \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.211616 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6bmfd\" (UniqueName: \"kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd\") pod \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\" (UID: \"268d0151-148e-46e8-9dbe-4227cdd3d0b3\") " Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.212104 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/44691fd6-915b-4927-b094-50b544fd638e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.215352 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd" (OuterVolumeSpecName: "kube-api-access-6bmfd") pod "268d0151-148e-46e8-9dbe-4227cdd3d0b3" (UID: "268d0151-148e-46e8-9dbe-4227cdd3d0b3"). InnerVolumeSpecName "kube-api-access-6bmfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.246787 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "268d0151-148e-46e8-9dbe-4227cdd3d0b3" (UID: "268d0151-148e-46e8-9dbe-4227cdd3d0b3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.252589 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data" (OuterVolumeSpecName: "config-data") pod "268d0151-148e-46e8-9dbe-4227cdd3d0b3" (UID: "268d0151-148e-46e8-9dbe-4227cdd3d0b3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.313629 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.313671 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/268d0151-148e-46e8-9dbe-4227cdd3d0b3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.313683 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6bmfd\" (UniqueName: \"kubernetes.io/projected/268d0151-148e-46e8-9dbe-4227cdd3d0b3-kube-api-access-6bmfd\") on node \"crc\" DevicePath \"\"" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.852523 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"b1112e38-5f73-4a31-8f6d-9b03a9148c02","Type":"ContainerStarted","Data":"cd3f77f2c44a078251722902f54bc2aaa98b098bbcf06c1134092c30b11f335d"} Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.853756 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.856177 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.856826 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.860545 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"268d0151-148e-46e8-9dbe-4227cdd3d0b3","Type":"ContainerDied","Data":"214488a7c39f40b7514ea11645be14dd3941b12570c713898b09f1ff96a761cc"} Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.860643 4982 scope.go:117] "RemoveContainer" containerID="c62a039f3dad185547c9010f5824e0196f4c0cc1d25f2b44148724aa74a2d6e3" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.919767 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.91974842 podStartE2EDuration="2.91974842s" podCreationTimestamp="2025-12-05 19:37:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:44.873575032 +0000 UTC m=+1443.755461017" watchObservedRunningTime="2025-12-05 19:37:44.91974842 +0000 UTC m=+1443.801634415" Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.978854 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:37:44 crc kubenswrapper[4982]: I1205 19:37:44.998936 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010189 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: E1205 19:37:45.010624 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-log" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010637 4982 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-log" Dec 05 19:37:45 crc kubenswrapper[4982]: E1205 19:37:45.010657 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-metadata" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010663 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-metadata" Dec 05 19:37:45 crc kubenswrapper[4982]: E1205 19:37:45.010682 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" containerName="nova-scheduler-scheduler" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010689 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" containerName="nova-scheduler-scheduler" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010876 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" containerName="nova-scheduler-scheduler" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010886 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-log" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.010897 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="44691fd6-915b-4927-b094-50b544fd638e" containerName="nova-metadata-metadata" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.011641 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.016628 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.018728 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.031951 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.041708 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.050012 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.051794 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.054159 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.054494 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.059779 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.133827 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.134424 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzm2n\" (UniqueName: \"kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.135238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.237105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzm2n\" (UniqueName: \"kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.237180 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.237254 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.237912 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.237973 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.238066 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.238110 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mrb9\" (UniqueName: \"kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.238195 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.243216 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.243785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.261853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzm2n\" (UniqueName: \"kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n\") pod \"nova-scheduler-0\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340524 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mrb9\" (UniqueName: \"kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340657 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340701 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340765 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.340828 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.341438 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.344066 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.345761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.356426 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.364260 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mrb9\" (UniqueName: \"kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9\") pod \"nova-metadata-0\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.365864 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.406599 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="268d0151-148e-46e8-9dbe-4227cdd3d0b3" path="/var/lib/kubelet/pods/268d0151-148e-46e8-9dbe-4227cdd3d0b3/volumes"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.407254 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44691fd6-915b-4927-b094-50b544fd638e" path="/var/lib/kubelet/pods/44691fd6-915b-4927-b094-50b544fd638e/volumes"
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.880551 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 19:37:45 crc kubenswrapper[4982]: I1205 19:37:45.894204 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.885733 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"18d28c18-728c-4040-8ce6-0d8c15c3f45a","Type":"ContainerStarted","Data":"e1d936ecf422ea6da061c7835f42755dfa25ccc2220e7fc98a3e1f0619112542"}
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.886056 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"18d28c18-728c-4040-8ce6-0d8c15c3f45a","Type":"ContainerStarted","Data":"ec95762bf50e9eed878ef178b436be101145e01370dc233811c9fbd29a5a24a4"}
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.890674 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerStarted","Data":"e9f7246a7c4fa824614f5fec433980deef71c1aa55fdffefa32cee40a5cc2d9c"}
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.890699 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerStarted","Data":"78ca6eac509d404e57d12e2cb31eb1069784a31a727f5dcb18da5cbd2f9b6196"}
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.890708 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerStarted","Data":"e4d603032006db6e8962a27c7f11427b9101805861e6663eb52f4717b27923ff"}
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.915046 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.915020773 podStartE2EDuration="2.915020773s" podCreationTimestamp="2025-12-05 19:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:46.913004062 +0000 UTC m=+1445.794890057" watchObservedRunningTime="2025-12-05 19:37:46.915020773 +0000 UTC m=+1445.796906778"
Dec 05 19:37:46 crc kubenswrapper[4982]: I1205 19:37:46.946529 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.94650644 podStartE2EDuration="2.94650644s" podCreationTimestamp="2025-12-05 19:37:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:46.931752766 +0000 UTC m=+1445.813638761" watchObservedRunningTime="2025-12-05 19:37:46.94650644 +0000 UTC m=+1445.828392445"
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.816683 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901020 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data\") pod \"58815d17-89de-431e-89ac-4e344ca88e9e\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") "
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901108 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs\") pod \"58815d17-89de-431e-89ac-4e344ca88e9e\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") "
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901231 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle\") pod \"58815d17-89de-431e-89ac-4e344ca88e9e\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") "
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901309 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srdxj\" (UniqueName: \"kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj\") pod \"58815d17-89de-431e-89ac-4e344ca88e9e\" (UID: \"58815d17-89de-431e-89ac-4e344ca88e9e\") "
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901566 4982 generic.go:334] "Generic (PLEG): container finished" podID="58815d17-89de-431e-89ac-4e344ca88e9e" containerID="348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6" exitCode=0
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901670 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerDied","Data":"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"}
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901699 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"58815d17-89de-431e-89ac-4e344ca88e9e","Type":"ContainerDied","Data":"f451e2bf1e01e88bc818c859b8ca8c39ed22450cff1d1a5ce6943450fd3c37e5"}
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901717 4982 scope.go:117] "RemoveContainer" containerID="348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901708 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs" (OuterVolumeSpecName: "logs") pod "58815d17-89de-431e-89ac-4e344ca88e9e" (UID: "58815d17-89de-431e-89ac-4e344ca88e9e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.901717 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.909317 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj" (OuterVolumeSpecName: "kube-api-access-srdxj") pod "58815d17-89de-431e-89ac-4e344ca88e9e" (UID: "58815d17-89de-431e-89ac-4e344ca88e9e"). InnerVolumeSpecName "kube-api-access-srdxj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.938563 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data" (OuterVolumeSpecName: "config-data") pod "58815d17-89de-431e-89ac-4e344ca88e9e" (UID: "58815d17-89de-431e-89ac-4e344ca88e9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:37:47 crc kubenswrapper[4982]: I1205 19:37:47.940919 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "58815d17-89de-431e-89ac-4e344ca88e9e" (UID: "58815d17-89de-431e-89ac-4e344ca88e9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.003562 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/58815d17-89de-431e-89ac-4e344ca88e9e-logs\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.003600 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.003614 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srdxj\" (UniqueName: \"kubernetes.io/projected/58815d17-89de-431e-89ac-4e344ca88e9e-kube-api-access-srdxj\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.003627 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/58815d17-89de-431e-89ac-4e344ca88e9e-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.033099 4982 scope.go:117] "RemoveContainer" containerID="5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.060873 4982 scope.go:117] "RemoveContainer" containerID="348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"
Dec 05 19:37:48 crc kubenswrapper[4982]: E1205 19:37:48.063081 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6\": container with ID starting with 348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6 not found: ID does not exist" containerID="348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.063121 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6"} err="failed to get container status \"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6\": rpc error: code = NotFound desc = could not find container \"348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6\": container with ID starting with 348dd053e0e50a5de6dd81cb1a6ddf9581bbec75d3f121ad5e21d6f212d7a8b6 not found: ID does not exist"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.063175 4982 scope.go:117] "RemoveContainer" containerID="5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"
Dec 05 19:37:48 crc kubenswrapper[4982]: E1205 19:37:48.063698 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c\": container with ID starting with 5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c not found: ID does not exist" containerID="5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.063749 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c"} err="failed to get container status \"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c\": rpc error: code = NotFound desc = could not find container \"5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c\": container with ID starting with 5fd382fb1edb1ce00b7a76a634486722b74e540935cd8dcb691eb6750adf727c not found: ID does not exist"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.234354 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.246969 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.266422 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:48 crc kubenswrapper[4982]: E1205 19:37:48.267097 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-api"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.267138 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-api"
Dec 05 19:37:48 crc kubenswrapper[4982]: E1205 19:37:48.267203 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-log"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.267214 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-log"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.268085 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-log"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.268165 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" containerName="nova-api-api"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.273333 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.276417 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.278692 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.309694 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd72f\" (UniqueName: \"kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.309962 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.310228 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.310463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.413099 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.413762 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd72f\" (UniqueName: \"kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.413539 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.414037 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.415632 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.419289 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.419363 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.430178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd72f\" (UniqueName: \"kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f\") pod \"nova-api-0\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " pod="openstack/nova-api-0"
Dec 05 19:37:48 crc kubenswrapper[4982]: I1205 19:37:48.601210 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.145779 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.409736 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58815d17-89de-431e-89ac-4e344ca88e9e" path="/var/lib/kubelet/pods/58815d17-89de-431e-89ac-4e344ca88e9e/volumes"
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.940767 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerStarted","Data":"974cfe690a61cf33be20ea7ee17a0168e081a6723c98ef8b0b289db4b52201c5"}
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.940814 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerStarted","Data":"03c8537a4bd683f4a19460eeef001ca562a0ad15fe43ca289fdedfbf3745b080"}
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.940825 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerStarted","Data":"0cca9e181896bbbe09826045a91442a9e40069254f647e5dedfd42248b64b005"}
Dec 05 19:37:49 crc kubenswrapper[4982]: I1205 19:37:49.963561 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.963541288 podStartE2EDuration="1.963541288s" podCreationTimestamp="2025-12-05 19:37:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:37:49.958126081 +0000 UTC m=+1448.840012116" watchObservedRunningTime="2025-12-05 19:37:49.963541288 +0000 UTC m=+1448.845427293"
Dec 05 19:37:50 crc kubenswrapper[4982]: I1205 19:37:50.341752 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 19:37:50 crc kubenswrapper[4982]: I1205 19:37:50.369597 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 19:37:50 crc kubenswrapper[4982]: I1205 19:37:50.371520 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 19:37:51 crc kubenswrapper[4982]: I1205 19:37:51.510431 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p5gbn"
Dec 05 19:37:51 crc kubenswrapper[4982]: I1205 19:37:51.598385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p5gbn"
Dec 05 19:37:52 crc kubenswrapper[4982]: I1205 19:37:52.263286 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"]
Dec 05 19:37:52 crc kubenswrapper[4982]: I1205 19:37:52.977862 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p5gbn" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server" containerID="cri-o://b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864" gracePeriod=2
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.099352 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0"
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.484131 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p5gbn"
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.633116 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities\") pod \"e51a33e0-110e-405e-8979-eda2e527de19\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") "
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.633542 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfr8v\" (UniqueName: \"kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v\") pod \"e51a33e0-110e-405e-8979-eda2e527de19\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") "
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.633586 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content\") pod \"e51a33e0-110e-405e-8979-eda2e527de19\" (UID: \"e51a33e0-110e-405e-8979-eda2e527de19\") "
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.634091 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities" (OuterVolumeSpecName: "utilities") pod "e51a33e0-110e-405e-8979-eda2e527de19" (UID: "e51a33e0-110e-405e-8979-eda2e527de19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.634247 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.640217 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v" (OuterVolumeSpecName: "kube-api-access-zfr8v") pod "e51a33e0-110e-405e-8979-eda2e527de19" (UID: "e51a33e0-110e-405e-8979-eda2e527de19"). InnerVolumeSpecName "kube-api-access-zfr8v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.730632 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e51a33e0-110e-405e-8979-eda2e527de19" (UID: "e51a33e0-110e-405e-8979-eda2e527de19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.736538 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfr8v\" (UniqueName: \"kubernetes.io/projected/e51a33e0-110e-405e-8979-eda2e527de19-kube-api-access-zfr8v\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.736725 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e51a33e0-110e-405e-8979-eda2e527de19-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.991400 4982 generic.go:334] "Generic (PLEG): container finished" podID="e51a33e0-110e-405e-8979-eda2e527de19" containerID="b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864" exitCode=0
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.991446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerDied","Data":"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"}
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.991501 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p5gbn" event={"ID":"e51a33e0-110e-405e-8979-eda2e527de19","Type":"ContainerDied","Data":"222702bf285708d7b2cc17c0efd232c2692031a689d39548d75a31f45ecf7191"}
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.991519 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p5gbn"
Dec 05 19:37:53 crc kubenswrapper[4982]: I1205 19:37:53.991560 4982 scope.go:117] "RemoveContainer" containerID="b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.027886 4982 scope.go:117] "RemoveContainer" containerID="54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.058133 4982 scope.go:117] "RemoveContainer" containerID="4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.059437 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"]
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.070080 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p5gbn"]
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.120869 4982 scope.go:117] "RemoveContainer" containerID="b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"
Dec 05 19:37:54 crc kubenswrapper[4982]: E1205 19:37:54.121446 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864\": container with ID starting with b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864 not found: ID does not exist" containerID="b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.121510 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864"} err="failed to get container status \"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864\": rpc error: code = NotFound desc = could not find container \"b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864\": container with ID starting with b13c54771edbdb9af4a2616078d6658efc2bfd06088d69898959b8d55a455864 not found: ID does not exist"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.121543 4982 scope.go:117] "RemoveContainer" containerID="54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"
Dec 05 19:37:54 crc kubenswrapper[4982]: E1205 19:37:54.122072 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778\": container with ID starting with 54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778 not found: ID does not exist" containerID="54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.122246 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778"} err="failed to get container status \"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778\": rpc error: code = NotFound desc = could not find container \"54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778\": container with ID starting with 54beafde54c3a31e5b7e5acc20fd066e0f0f6ad9fc3f587041c5ebc3bd420778 not found: ID does not exist"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.122357 4982 scope.go:117] "RemoveContainer" containerID="4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a"
Dec 05 19:37:54 crc kubenswrapper[4982]: E1205 19:37:54.122887 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a\": container with ID starting with 4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a not found: ID does not exist" containerID="4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a"
Dec 05 19:37:54 crc kubenswrapper[4982]: I1205 19:37:54.122926 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a"} err="failed to get container status \"4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a\": rpc error: code = NotFound desc = could not find container \"4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a\": container with ID starting with 4faa95efcaa17ea81850e68f215b5df65fe97af458d28603e65087f49e628b8a not found: ID does not exist"
Dec 05 19:37:55 crc kubenswrapper[4982]: I1205 19:37:55.341198 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 19:37:55 crc kubenswrapper[4982]: I1205 19:37:55.366626 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 19:37:55 crc kubenswrapper[4982]: I1205 19:37:55.366696 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 19:37:55 crc kubenswrapper[4982]: I1205 19:37:55.380245 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 19:37:55 crc kubenswrapper[4982]: I1205 19:37:55.411071 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e51a33e0-110e-405e-8979-eda2e527de19" path="/var/lib/kubelet/pods/e51a33e0-110e-405e-8979-eda2e527de19/volumes"
Dec 05 19:37:56 crc kubenswrapper[4982]: I1205 19:37:56.056307 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 19:37:56 crc kubenswrapper[4982]: I1205 19:37:56.373355 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 19:37:56 crc kubenswrapper[4982]: I1205 19:37:56.379368 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 19:37:57 crc kubenswrapper[4982]: E1205 19:37:57.668481 4982 fsHandler.go:119] failed to collect filesystem stats - rootDiskErr: could not stat "/var/lib/containers/storage/overlay/1d927eb120091b3d2eba2115d3d5912382d45f2495803c021d2552894addf9d8/diff" to get inode usage: stat /var/lib/containers/storage/overlay/1d927eb120091b3d2eba2115d3d5912382d45f2495803c021d2552894addf9d8/diff: no such file or directory, extraDiskErr:
Dec 05 19:37:58 crc kubenswrapper[4982]: I1205 19:37:58.602195 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 19:37:58 crc kubenswrapper[4982]: I1205 19:37:58.602502 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 19:37:59 crc kubenswrapper[4982]: I1205 19:37:59.685390 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.222:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 19:37:59 crc kubenswrapper[4982]: I1205 19:37:59.685472 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.222:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 19:38:05 crc kubenswrapper[4982]: I1205 19:38:05.376396 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 19:38:05 crc kubenswrapper[4982]: I1205 19:38:05.378664 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 19:38:05 crc kubenswrapper[4982]: I1205 19:38:05.383824 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 19:38:06 crc kubenswrapper[4982]: I1205 19:38:06.153946 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 19:38:06 crc kubenswrapper[4982]: I1205 19:38:06.519224 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 19:38:07 crc kubenswrapper[4982]: E1205 19:38:07.860476 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1fb8006_cfc6_412d_90be_9bb828949621.slice/crio-cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017\": RecentStats: unable to find data in memory cache]"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.041029 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.165941 4982 generic.go:334] "Generic (PLEG): container finished" podID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" containerID="56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1" exitCode=137
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.167052 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.167142 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"14f5ae40-0fa5-4c05-b172-3edd52b0b313","Type":"ContainerDied","Data":"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"}
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.167201 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"14f5ae40-0fa5-4c05-b172-3edd52b0b313","Type":"ContainerDied","Data":"af0d3add732370b996c196b237d21ebf5fcfd724d59691bd4a608bfb6385d88f"}
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.167219 4982 scope.go:117] "RemoveContainer" containerID="56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.200422 4982 scope.go:117] "RemoveContainer" containerID="56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"
Dec 05 19:38:08 crc kubenswrapper[4982]: E1205 19:38:08.200901 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1\": container with ID starting with 56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1 not found: ID does not exist" containerID="56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.200948 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1"} err="failed to get container status \"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1\": rpc error: code = NotFound desc = could not find container \"56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1\": container with ID starting with 56ce2d3ae0186a61ba3301ca54f44a04dae031a2c4c6d949cf3d180ce068d6b1 not found: ID does not exist"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.202764 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle\") pod \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") "
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.202831 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7mt7j\" (UniqueName: \"kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j\") pod \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") "
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.202998 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data\") pod \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\" (UID: \"14f5ae40-0fa5-4c05-b172-3edd52b0b313\") "
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.208465 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j" (OuterVolumeSpecName: "kube-api-access-7mt7j") pod "14f5ae40-0fa5-4c05-b172-3edd52b0b313" (UID: "14f5ae40-0fa5-4c05-b172-3edd52b0b313"). InnerVolumeSpecName "kube-api-access-7mt7j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.236443 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data" (OuterVolumeSpecName: "config-data") pod "14f5ae40-0fa5-4c05-b172-3edd52b0b313" (UID: "14f5ae40-0fa5-4c05-b172-3edd52b0b313"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.238040 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14f5ae40-0fa5-4c05-b172-3edd52b0b313" (UID: "14f5ae40-0fa5-4c05-b172-3edd52b0b313"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.305382 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.305427 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14f5ae40-0fa5-4c05-b172-3edd52b0b313-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.305442 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7mt7j\" (UniqueName: \"kubernetes.io/projected/14f5ae40-0fa5-4c05-b172-3edd52b0b313-kube-api-access-7mt7j\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.506434 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.523879 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.538246 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:38:08 crc kubenswrapper[4982]: E1205 19:38:08.538824 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.538841 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 19:38:08 crc kubenswrapper[4982]: E1205 19:38:08.538887 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="extract-utilities"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.538897 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="extract-utilities"
Dec 05 19:38:08 crc kubenswrapper[4982]: E1205 19:38:08.538914 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="extract-content"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.538923 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="extract-content"
Dec 05 19:38:08 crc kubenswrapper[4982]: E1205 19:38:08.538946 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.538954 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.539225 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" containerName="nova-cell1-novncproxy-novncproxy"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.539247 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e51a33e0-110e-405e-8979-eda2e527de19" containerName="registry-server"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.540265 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.545483 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.545757 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.545906 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.550370 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.607255 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.607319 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.607757 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.607781 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.611247 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.611960 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.715947 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.716453 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.716529 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.716713 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.716784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ctdg4\" (UniqueName: \"kubernetes.io/projected/592eec2a-b340-4c42-8b06-ab477b4aecfa-kube-api-access-ctdg4\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.831694 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"]
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.834383 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.834459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.834580 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.834623 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ctdg4\" (UniqueName: \"kubernetes.io/projected/592eec2a-b340-4c42-8b06-ab477b4aecfa-kube-api-access-ctdg4\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.834660 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.840264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.843745 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.846858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.848558 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/592eec2a-b340-4c42-8b06-ab477b4aecfa-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.866575 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ctdg4\" (UniqueName: \"kubernetes.io/projected/592eec2a-b340-4c42-8b06-ab477b4aecfa-kube-api-access-ctdg4\") pod \"nova-cell1-novncproxy-0\" (UID: \"592eec2a-b340-4c42-8b06-ab477b4aecfa\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.869323 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"]
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.869677 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:08 crc kubenswrapper[4982]: I1205 19:38:08.870977 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.040907 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.041302 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.041463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgg4g\" (UniqueName: \"kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.041484 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.041504 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.041535 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143099 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgg4g\" (UniqueName: \"kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143178 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143204 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143230 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143272 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.143317 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.144637 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.144817 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.145261 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.145296 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.145266 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.168266 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgg4g\" (UniqueName: \"kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g\") pod \"dnsmasq-dns-78468d7767-ntj5m\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.292619 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-ntj5m"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.418288 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14f5ae40-0fa5-4c05-b172-3edd52b0b313" path="/var/lib/kubelet/pods/14f5ae40-0fa5-4c05-b172-3edd52b0b313/volumes"
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.444211 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 19:38:09 crc kubenswrapper[4982]: I1205 19:38:09.812981 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"]
Dec 05 19:38:09 crc kubenswrapper[4982]: W1205 19:38:09.813675 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda957342e_1213_473c_a9a3_bf1e90bf9bf7.slice/crio-14fde105179c9afa1e3389c9c4666b82c7e9ebb51f244cee7ae0b1cfefb003ca WatchSource:0}: Error finding container 14fde105179c9afa1e3389c9c4666b82c7e9ebb51f244cee7ae0b1cfefb003ca: Status 404 returned error can't find the container with id 14fde105179c9afa1e3389c9c4666b82c7e9ebb51f244cee7ae0b1cfefb003ca
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.194922 4982 generic.go:334] "Generic (PLEG): container finished" podID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerID="565492ea7d9569f76f04f7fa23be473e07479dd5f85f727dfcbb4b85ce042ed4" exitCode=0
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.195026 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" event={"ID":"a957342e-1213-473c-a9a3-bf1e90bf9bf7","Type":"ContainerDied","Data":"565492ea7d9569f76f04f7fa23be473e07479dd5f85f727dfcbb4b85ce042ed4"}
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.195232 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" event={"ID":"a957342e-1213-473c-a9a3-bf1e90bf9bf7","Type":"ContainerStarted","Data":"14fde105179c9afa1e3389c9c4666b82c7e9ebb51f244cee7ae0b1cfefb003ca"}
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.198865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"592eec2a-b340-4c42-8b06-ab477b4aecfa","Type":"ContainerStarted","Data":"559093aa0d95d43e90e0268bcfe2a439c8df7f65808c5affb2b3be47d28c6fe4"}
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.198909 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"592eec2a-b340-4c42-8b06-ab477b4aecfa","Type":"ContainerStarted","Data":"08d6792fb04c55c332037326413d8d9f4aa2b9c675c791d036977f214564b639"}
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.239546 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.239524006 podStartE2EDuration="2.239524006s" podCreationTimestamp="2025-12-05 19:38:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:10.234658122 +0000 UTC m=+1469.116544127" watchObservedRunningTime="2025-12-05 19:38:10.239524006 +0000 UTC m=+1469.121410001"
Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.796553 4982 kubelet.go:2437] "SyncLoop DELETE" source="api"
pods=["openstack/ceilometer-0"] Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.797107 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-central-agent" containerID="cri-o://62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" gracePeriod=30 Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.797504 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="proxy-httpd" containerID="cri-o://72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" gracePeriod=30 Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.797547 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="sg-core" containerID="cri-o://55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" gracePeriod=30 Dec 05 19:38:10 crc kubenswrapper[4982]: I1205 19:38:10.797583 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-notification-agent" containerID="cri-o://457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" gracePeriod=30 Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.209962 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" event={"ID":"a957342e-1213-473c-a9a3-bf1e90bf9bf7","Type":"ContainerStarted","Data":"c2b1af06ccac3262967938e0ae84297794dbdbc59396ad07f2e5edb772001a1e"} Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.210085 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.213040 4982 generic.go:334] "Generic (PLEG): container finished" podID="65085e69-df46-4bb8-a7cc-173d21c73088" containerID="72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" exitCode=0 Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.213259 4982 generic.go:334] "Generic (PLEG): container finished" podID="65085e69-df46-4bb8-a7cc-173d21c73088" containerID="55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" exitCode=2 Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.213118 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerDied","Data":"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a"} Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.213635 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerDied","Data":"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c"} Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.235825 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" podStartSLOduration=3.235809348 podStartE2EDuration="3.235809348s" podCreationTimestamp="2025-12-05 19:38:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:11.232208866 +0000 UTC m=+1470.114094861" watchObservedRunningTime="2025-12-05 
19:38:11.235809348 +0000 UTC m=+1470.117695343" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.820755 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.896999 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897080 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897118 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897185 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b27zq\" (UniqueName: \"kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897211 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897239 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897306 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.897403 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts\") pod \"65085e69-df46-4bb8-a7cc-173d21c73088\" (UID: \"65085e69-df46-4bb8-a7cc-173d21c73088\") " Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.898301 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.898563 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.898723 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.898754 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65085e69-df46-4bb8-a7cc-173d21c73088-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.927519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq" (OuterVolumeSpecName: "kube-api-access-b27zq") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "kube-api-access-b27zq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.934987 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts" (OuterVolumeSpecName: "scripts") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.939512 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.947354 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-api" containerID="cri-o://974cfe690a61cf33be20ea7ee17a0168e081a6723c98ef8b0b289db4b52201c5" gracePeriod=30 Dec 05 19:38:11 crc kubenswrapper[4982]: I1205 19:38:11.949542 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-log" containerID="cri-o://03c8537a4bd683f4a19460eeef001ca562a0ad15fe43ca289fdedfbf3745b080" gracePeriod=30 Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.001706 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b27zq\" (UniqueName: \"kubernetes.io/projected/65085e69-df46-4bb8-a7cc-173d21c73088-kube-api-access-b27zq\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.001741 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.023045 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.105644 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.139616 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.202707 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.208037 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.208060 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.212865 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data" (OuterVolumeSpecName: "config-data") pod "65085e69-df46-4bb8-a7cc-173d21c73088" (UID: "65085e69-df46-4bb8-a7cc-173d21c73088"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.222986 4982 generic.go:334] "Generic (PLEG): container finished" podID="65085e69-df46-4bb8-a7cc-173d21c73088" containerID="457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" exitCode=0 Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223125 4982 generic.go:334] "Generic (PLEG): container finished" podID="65085e69-df46-4bb8-a7cc-173d21c73088" containerID="62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" exitCode=0 Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223164 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223318 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerDied","Data":"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce"} Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223639 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerDied","Data":"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb"} Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223704 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65085e69-df46-4bb8-a7cc-173d21c73088","Type":"ContainerDied","Data":"15b028646d6afe6b21beddc617a089bf2109e0c5ff812f3a615504422858c3b3"} Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.223673 4982 scope.go:117] "RemoveContainer" containerID="72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.233929 4982 generic.go:334] "Generic (PLEG): container finished" podID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerID="03c8537a4bd683f4a19460eeef001ca562a0ad15fe43ca289fdedfbf3745b080" exitCode=143 Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.235315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerDied","Data":"03c8537a4bd683f4a19460eeef001ca562a0ad15fe43ca289fdedfbf3745b080"} Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.253231 4982 scope.go:117] "RemoveContainer" containerID="55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" Dec 05 19:38:12 crc kubenswrapper[4982]: 
I1205 19:38:12.310062 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65085e69-df46-4bb8-a7cc-173d21c73088-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.314580 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.315467 4982 scope.go:117] "RemoveContainer" containerID="457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.342275 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.367377 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.367834 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-central-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.367852 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-central-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.367873 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="proxy-httpd" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.367881 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="proxy-httpd" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.367904 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="sg-core" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.367912 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="sg-core" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.367931 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-notification-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.367938 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-notification-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.368239 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-central-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.368262 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="ceilometer-notification-agent" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.368272 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="proxy-httpd" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.368287 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" containerName="sg-core" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.369100 4982 scope.go:117] "RemoveContainer" containerID="62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.371204 4982 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.373405 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.373620 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.373787 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.385993 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.410225 4982 scope.go:117] "RemoveContainer" containerID="72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.411725 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a\": container with ID starting with 72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a not found: ID does not exist" containerID="72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.411795 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a"} err="failed to get container status \"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a\": rpc error: code = NotFound desc = could not find container \"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a\": container with ID starting with 72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.411825 4982 scope.go:117] "RemoveContainer" containerID="55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.413670 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c\": container with ID starting with 55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c not found: ID does not exist" containerID="55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.413718 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c"} err="failed to get container status \"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c\": rpc error: code = NotFound desc = could not find container \"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c\": container with ID starting with 55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.413741 4982 scope.go:117] "RemoveContainer" containerID="457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.414697 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce\": container with ID starting with 457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce not found: ID does not exist" containerID="457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.414726 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce"} err="failed to get container status \"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce\": rpc error: code = NotFound desc = could not find container \"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce\": container with ID starting with 457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.414748 4982 scope.go:117] "RemoveContainer" containerID="62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" Dec 05 19:38:12 crc kubenswrapper[4982]: E1205 19:38:12.415704 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb\": container with ID starting with 62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb not found: ID does not exist" containerID="62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.415740 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb"} err="failed to get container status \"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb\": rpc error: code = NotFound desc = could not find container \"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb\": container with ID starting with 62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.415758 4982 scope.go:117] "RemoveContainer" containerID="72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.416167 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a"} err="failed to get container status \"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a\": rpc error: code = NotFound desc = could not find container \"72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a\": container with ID starting with 72b334c1781859eb7f59ceafdcc21ea2f5cf1b24f7e4c7c34e8bacc34fd06f2a not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.416194 4982 scope.go:117] "RemoveContainer" containerID="55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.420418 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c"} err="failed to get container status \"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c\": rpc error: code = NotFound desc = could not find container \"55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c\": container with ID 
starting with 55230d5fac727abe49b833d91c3acc40dfcd29563e70a6bf3a25d43ca067dc0c not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.420462 4982 scope.go:117] "RemoveContainer" containerID="457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.425306 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce"} err="failed to get container status \"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce\": rpc error: code = NotFound desc = could not find container \"457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce\": container with ID starting with 457605a9b4f3b7caf99fe99eff91c9ea4888886ef1ae11043ad4d04b26b21cce not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.425341 4982 scope.go:117] "RemoveContainer" containerID="62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.428679 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb"} err="failed to get container status \"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb\": rpc error: code = NotFound desc = could not find container \"62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb\": container with ID starting with 62de1e7a5e35d1a531a2f94bff39cc731531635c452f0e0fe67fc73d0c313feb not found: ID does not exist" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516476 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516518 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516537 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516584 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6679w\" (UniqueName: \"kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc 
kubenswrapper[4982]: I1205 19:38:12.516726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516786 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.516833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618200 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618258 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618347 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6679w\" (UniqueName: \"kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618444 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618521 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618591 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.618651 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.619253 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.620486 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.623766 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.623907 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.624488 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.625526 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.626627 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.634388 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6679w\" (UniqueName: \"kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w\") pod \"ceilometer-0\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " pod="openstack/ceilometer-0" Dec 05 19:38:12 crc kubenswrapper[4982]: I1205 19:38:12.695064 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:13 crc kubenswrapper[4982]: I1205 19:38:13.188442 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:13 crc kubenswrapper[4982]: W1205 19:38:13.190644 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6c25395b_4bfb_4a7a_a0c6_b2de0012da9b.slice/crio-8b52f18314f0a95ca1aa1ea46f0c2a192c40071880df47ab67499a9844aac850 WatchSource:0}: Error finding container 8b52f18314f0a95ca1aa1ea46f0c2a192c40071880df47ab67499a9844aac850: Status 404 returned error can't find the container with id 8b52f18314f0a95ca1aa1ea46f0c2a192c40071880df47ab67499a9844aac850 Dec 05 19:38:13 crc kubenswrapper[4982]: I1205 19:38:13.244900 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerStarted","Data":"8b52f18314f0a95ca1aa1ea46f0c2a192c40071880df47ab67499a9844aac850"} Dec 05 19:38:13 crc kubenswrapper[4982]: I1205 19:38:13.287657 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:13 crc kubenswrapper[4982]: I1205 19:38:13.403643 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65085e69-df46-4bb8-a7cc-173d21c73088" path="/var/lib/kubelet/pods/65085e69-df46-4bb8-a7cc-173d21c73088/volumes" Dec 05 19:38:13 crc kubenswrapper[4982]: I1205 19:38:13.872228 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:38:14 crc kubenswrapper[4982]: I1205 19:38:14.255964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerStarted","Data":"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b"} Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.268417 4982 generic.go:334] "Generic (PLEG): container finished" podID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerID="974cfe690a61cf33be20ea7ee17a0168e081a6723c98ef8b0b289db4b52201c5" exitCode=0 Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.268584 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerDied","Data":"974cfe690a61cf33be20ea7ee17a0168e081a6723c98ef8b0b289db4b52201c5"} Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.271005 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerStarted","Data":"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1"} Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.271038 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerStarted","Data":"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71"} Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.636375 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.788957 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data\") pod \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.789017 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs\") pod \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.789068 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd72f\" (UniqueName: \"kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f\") pod \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.789215 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle\") pod \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\" (UID: \"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611\") " Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.789525 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs" (OuterVolumeSpecName: "logs") pod "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" (UID: "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.790213 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.794436 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f" (OuterVolumeSpecName: "kube-api-access-vd72f") pod "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" (UID: "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611"). InnerVolumeSpecName "kube-api-access-vd72f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.837696 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" (UID: "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.869188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data" (OuterVolumeSpecName: "config-data") pod "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" (UID: "25aec23d-3e9f-4a93-b4d9-bc9c42cf7611"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.892397 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.892426 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:15 crc kubenswrapper[4982]: I1205 19:38:15.892435 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd72f\" (UniqueName: \"kubernetes.io/projected/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611-kube-api-access-vd72f\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.283900 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.283811 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"25aec23d-3e9f-4a93-b4d9-bc9c42cf7611","Type":"ContainerDied","Data":"0cca9e181896bbbe09826045a91442a9e40069254f647e5dedfd42248b64b005"} Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.288297 4982 scope.go:117] "RemoveContainer" containerID="974cfe690a61cf33be20ea7ee17a0168e081a6723c98ef8b0b289db4b52201c5" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.318383 4982 scope.go:117] "RemoveContainer" containerID="03c8537a4bd683f4a19460eeef001ca562a0ad15fe43ca289fdedfbf3745b080" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.331205 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.342711 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.359640 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:16 crc kubenswrapper[4982]: E1205 19:38:16.360177 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-api" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.360194 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-api" Dec 05 19:38:16 crc kubenswrapper[4982]: E1205 19:38:16.360231 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-log" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.360240 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-log" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.360491 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-api" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.360509 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" containerName="nova-api-log" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.362016 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.365893 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.366239 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.367073 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.377457 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504225 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504321 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504347 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504367 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504392 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.504425 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7wpbc\" (UniqueName: \"kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.605992 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.606324 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7wpbc\" (UniqueName: \"kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc\") pod 
\"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.606593 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.606743 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.606826 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.606952 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.607451 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.610811 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.610852 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.611575 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.616259 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.625792 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7wpbc\" (UniqueName: \"kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc\") pod \"nova-api-0\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") " pod="openstack/nova-api-0" Dec 
05 19:38:16 crc kubenswrapper[4982]: I1205 19:38:16.717865 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:17 crc kubenswrapper[4982]: W1205 19:38:17.187760 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96dba84f_7287_43fc_80d4_80e9c4af787c.slice/crio-5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a WatchSource:0}: Error finding container 5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a: Status 404 returned error can't find the container with id 5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.192492 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.306373 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerStarted","Data":"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70"} Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.306695 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-central-agent" containerID="cri-o://b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b" gracePeriod=30 Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.306828 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.307207 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="proxy-httpd" containerID="cri-o://0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70" gracePeriod=30 Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.307436 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-notification-agent" containerID="cri-o://7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71" gracePeriod=30 Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.307595 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="sg-core" containerID="cri-o://6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1" gracePeriod=30 Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.309993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerStarted","Data":"5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a"} Dec 05 19:38:17 crc kubenswrapper[4982]: I1205 19:38:17.404890 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25aec23d-3e9f-4a93-b4d9-bc9c42cf7611" path="/var/lib/kubelet/pods/25aec23d-3e9f-4a93-b4d9-bc9c42cf7611/volumes" Dec 05 19:38:18 crc kubenswrapper[4982]: E1205 19:38:18.125861 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1fb8006_cfc6_412d_90be_9bb828949621.slice/crio-cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017\": RecentStats: unable to find data in memory cache]" Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326362 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerID="0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70" exitCode=0 Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326567 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerID="6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1" exitCode=2 Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326574 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerID="7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71" exitCode=0 Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326604 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerDied","Data":"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70"} Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326629 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerDied","Data":"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1"} Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.326638 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerDied","Data":"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71"} Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.327716 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerStarted","Data":"b8cd0c581a91dd6efd1b62716bf596fe34ba36e7d79f921c91c2d630a930c66c"} Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.327734 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerStarted","Data":"b11d1b7382b2967e9292886664692c078e569976a36ef5dbf54831d3bd2b4c45"} Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.349113 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.349096245 podStartE2EDuration="2.349096245s" podCreationTimestamp="2025-12-05 19:38:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:18.34812875 +0000 UTC m=+1477.230014775" watchObservedRunningTime="2025-12-05 19:38:18.349096245 +0000 UTC m=+1477.230982240" Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.356771 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.441943316 podStartE2EDuration="6.356754518s" podCreationTimestamp="2025-12-05 19:38:12 +0000 UTC" firstStartedPulling="2025-12-05 19:38:13.193042337 +0000 UTC m=+1472.074928322" lastFinishedPulling="2025-12-05 19:38:16.107853529 +0000 UTC m=+1474.989739524" observedRunningTime="2025-12-05 19:38:17.337207618 +0000 UTC 
m=+1476.219093633" watchObservedRunningTime="2025-12-05 19:38:18.356754518 +0000 UTC m=+1477.238640513" Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.871511 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:38:18 crc kubenswrapper[4982]: I1205 19:38:18.892924 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.293915 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.373516 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.383750 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.384129 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="dnsmasq-dns" containerID="cri-o://5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6" gracePeriod=10 Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.555340 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-lvhmg"] Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.557431 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.559864 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.561664 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.568974 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvhmg"] Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.678399 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.678592 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pv2kv\" (UniqueName: \"kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.678659 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.678705 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.780570 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pv2kv\" (UniqueName: \"kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.780702 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.780807 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.780858 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.786694 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.787863 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.790830 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.814678 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pv2kv\" (UniqueName: \"kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv\") pod \"nova-cell1-cell-mapping-lvhmg\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:19 crc kubenswrapper[4982]: I1205 19:38:19.880586 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.030528 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.086818 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.086954 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.087007 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.087046 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gvxgp\" (UniqueName: \"kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.087196 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.087356 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb\") pod \"c78c0950-654a-40c4-8ae4-bf213130fbcf\" (UID: \"c78c0950-654a-40c4-8ae4-bf213130fbcf\") " Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.098358 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp" (OuterVolumeSpecName: "kube-api-access-gvxgp") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "kube-api-access-gvxgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.189551 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gvxgp\" (UniqueName: \"kubernetes.io/projected/c78c0950-654a-40c4-8ae4-bf213130fbcf-kube-api-access-gvxgp\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.212986 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.304908 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.314086 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.314333 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.345739 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config" (OuterVolumeSpecName: "config") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.346704 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c78c0950-654a-40c4-8ae4-bf213130fbcf" (UID: "c78c0950-654a-40c4-8ae4-bf213130fbcf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.367110 4982 generic.go:334] "Generic (PLEG): container finished" podID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerID="5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6" exitCode=0 Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.367557 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.367790 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" event={"ID":"c78c0950-654a-40c4-8ae4-bf213130fbcf","Type":"ContainerDied","Data":"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6"} Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.367845 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-t4n2t" event={"ID":"c78c0950-654a-40c4-8ae4-bf213130fbcf","Type":"ContainerDied","Data":"9a80bae82d193ced15a59edfc36c1531773341018384210173573f97f63dd51b"} Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.367870 4982 scope.go:117] "RemoveContainer" containerID="5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.406359 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.406387 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.406397 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.406406 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c78c0950-654a-40c4-8ae4-bf213130fbcf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.422237 4982 scope.go:117] "RemoveContainer" containerID="4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.438089 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.446425 4982 scope.go:117] "RemoveContainer" containerID="5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6" Dec 05 19:38:20 crc kubenswrapper[4982]: E1205 19:38:20.446911 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6\": container with ID starting with 5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6 not found: ID does not exist" containerID="5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.446956 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6"} err="failed to get container status \"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6\": rpc error: code = NotFound desc = could not find container \"5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6\": container with ID starting with 5bda11665c7e649329ad62583a996896c132d42bd01c70788637cded40e6caa6 not found: ID does not exist" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 
19:38:20.446983 4982 scope.go:117] "RemoveContainer" containerID="4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865" Dec 05 19:38:20 crc kubenswrapper[4982]: E1205 19:38:20.447443 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865\": container with ID starting with 4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865 not found: ID does not exist" containerID="4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.447475 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865"} err="failed to get container status \"4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865\": rpc error: code = NotFound desc = could not find container \"4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865\": container with ID starting with 4bf2f85f4611c96409523da9be9db5b5013983e1f2a528ce24c04aebeb2b2865 not found: ID does not exist" Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.450410 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-t4n2t"] Dec 05 19:38:20 crc kubenswrapper[4982]: I1205 19:38:20.657532 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvhmg"] Dec 05 19:38:20 crc kubenswrapper[4982]: W1205 19:38:20.677701 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb75aca9_9973_41f0_8bf2_02a97b01f57f.slice/crio-890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c WatchSource:0}: Error finding container 890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c: Status 404 returned error can't find the container with id 890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.141422 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.223694 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224129 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224252 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224284 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6679w\" (UniqueName: \"kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224310 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224409 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224439 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224514 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs\") pod \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\" (UID: \"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b\") " Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224694 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.224762 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.225500 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.225522 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.228500 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w" (OuterVolumeSpecName: "kube-api-access-6679w") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "kube-api-access-6679w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.228702 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts" (OuterVolumeSpecName: "scripts") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.253769 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.295159 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.311820 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.327139 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.327193 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6679w\" (UniqueName: \"kubernetes.io/projected/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-kube-api-access-6679w\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.327208 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.327450 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.327472 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.347205 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data" (OuterVolumeSpecName: "config-data") pod "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" (UID: "6c25395b-4bfb-4a7a-a0c6-b2de0012da9b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.385134 4982 generic.go:334] "Generic (PLEG): container finished" podID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerID="b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b" exitCode=0 Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.385281 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.388212 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerDied","Data":"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b"} Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.388269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6c25395b-4bfb-4a7a-a0c6-b2de0012da9b","Type":"ContainerDied","Data":"8b52f18314f0a95ca1aa1ea46f0c2a192c40071880df47ab67499a9844aac850"} Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.388294 4982 scope.go:117] "RemoveContainer" containerID="0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.410227 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" path="/var/lib/kubelet/pods/c78c0950-654a-40c4-8ae4-bf213130fbcf/volumes" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.411327 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvhmg" event={"ID":"cb75aca9-9973-41f0-8bf2-02a97b01f57f","Type":"ContainerStarted","Data":"1bfe36a950e35eb890eeab03f09d8d2abead59a0605cb899bc55c6b4e1ae3ff7"} Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.411359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvhmg" event={"ID":"cb75aca9-9973-41f0-8bf2-02a97b01f57f","Type":"ContainerStarted","Data":"890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c"} Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.418165 4982 scope.go:117] "RemoveContainer" containerID="6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.429813 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.442187 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-lvhmg" podStartSLOduration=2.442140296 podStartE2EDuration="2.442140296s" podCreationTimestamp="2025-12-05 19:38:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:21.433937449 +0000 UTC m=+1480.315823444" watchObservedRunningTime="2025-12-05 19:38:21.442140296 +0000 UTC m=+1480.324026291" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.454826 4982 scope.go:117] "RemoveContainer" containerID="7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.458060 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.467807 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.484646 4982 scope.go:117] "RemoveContainer" containerID="b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.485652 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:21 crc 
kubenswrapper[4982]: E1205 19:38:21.486223 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-notification-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.486325 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-notification-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.486416 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="init" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.486505 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="init" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.486603 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="proxy-httpd" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.486682 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="proxy-httpd" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.486818 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-central-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.486902 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-central-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.487055 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="dnsmasq-dns" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.487162 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="dnsmasq-dns" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.487249 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="sg-core" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.487324 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="sg-core" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.487687 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-notification-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.487798 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="ceilometer-central-agent" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.488798 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="sg-core" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.488926 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" containerName="proxy-httpd" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.489025 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c78c0950-654a-40c4-8ae4-bf213130fbcf" containerName="dnsmasq-dns" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.492035 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.495591 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.496090 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.496604 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.529627 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531370 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531525 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531621 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531755 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531893 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnrln\" (UniqueName: \"kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.531993 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.532136 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.532255 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.596388 4982 scope.go:117] "RemoveContainer" containerID="0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.596931 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70\": container with ID starting with 0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70 not found: ID does not exist" containerID="0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.596970 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70"} err="failed to get container status \"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70\": rpc error: code = NotFound desc = could not find container \"0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70\": container with ID starting with 0071cad9d694d9beb0dc4cbe880a1e80c0725d1dde4cfc8dff7fb049a06c5b70 not found: ID does not exist" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.596994 4982 scope.go:117] "RemoveContainer" containerID="6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.597420 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1\": container with ID starting with 6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1 not found: ID does not exist" containerID="6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.597450 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1"} err="failed to get container status \"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1\": rpc error: code = NotFound desc = could not find container \"6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1\": container with ID starting with 6a46fc974ba66cc2d95396086e92fb035be687cbc59a79ee9ce4b266aa7c03b1 not found: ID does not exist" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.597472 4982 scope.go:117] "RemoveContainer" containerID="7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.597741 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71\": container with ID starting with 7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71 not found: ID does not exist" containerID="7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.597760 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71"} err="failed to get container 
status \"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71\": rpc error: code = NotFound desc = could not find container \"7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71\": container with ID starting with 7b554de75df0cb2cd8574881310b767e048a174d4aa8aee47c9639b053c60c71 not found: ID does not exist" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.597774 4982 scope.go:117] "RemoveContainer" containerID="b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b" Dec 05 19:38:21 crc kubenswrapper[4982]: E1205 19:38:21.597985 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b\": container with ID starting with b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b not found: ID does not exist" containerID="b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.598005 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b"} err="failed to get container status \"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b\": rpc error: code = NotFound desc = could not find container \"b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b\": container with ID starting with b4922a30fe65d97a9b9bad0b3d57b8f70307b4549b1dc7a10d4a53c21148361b not found: ID does not exist" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634558 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634610 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634631 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634659 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634705 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnrln\" (UniqueName: \"kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.634787 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.635585 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.635712 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.639088 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.639545 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.639688 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.639866 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.641752 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.658499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnrln\" (UniqueName: \"kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln\") pod 
\"ceilometer-0\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " pod="openstack/ceilometer-0" Dec 05 19:38:21 crc kubenswrapper[4982]: I1205 19:38:21.889735 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:38:22 crc kubenswrapper[4982]: I1205 19:38:22.432241 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:38:22 crc kubenswrapper[4982]: W1205 19:38:22.445043 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4f253ee0_dab6_43ac_83b0_9d6271963e99.slice/crio-4463aa2549f3abd75279214ba42ffcb074d16bfc710351b018209f030f7e93c8 WatchSource:0}: Error finding container 4463aa2549f3abd75279214ba42ffcb074d16bfc710351b018209f030f7e93c8: Status 404 returned error can't find the container with id 4463aa2549f3abd75279214ba42ffcb074d16bfc710351b018209f030f7e93c8 Dec 05 19:38:23 crc kubenswrapper[4982]: I1205 19:38:23.408289 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6c25395b-4bfb-4a7a-a0c6-b2de0012da9b" path="/var/lib/kubelet/pods/6c25395b-4bfb-4a7a-a0c6-b2de0012da9b/volumes" Dec 05 19:38:23 crc kubenswrapper[4982]: I1205 19:38:23.430250 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerStarted","Data":"7eede677cdcc059cd5db393561740c918e76032841a0251a6610285e808b196a"} Dec 05 19:38:23 crc kubenswrapper[4982]: I1205 19:38:23.430294 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerStarted","Data":"4463aa2549f3abd75279214ba42ffcb074d16bfc710351b018209f030f7e93c8"} Dec 05 19:38:24 crc kubenswrapper[4982]: I1205 19:38:24.443597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerStarted","Data":"b59ac17d2b43acb76e9c01473bc60c78edae411feffc0257a29777bb3d2fe10f"} Dec 05 19:38:25 crc kubenswrapper[4982]: I1205 19:38:25.455755 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerStarted","Data":"38c72eabc49fd754ec0f939bc8a7e7ad84405016ca4e32ca4fa3994bba7e639f"} Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.469906 4982 generic.go:334] "Generic (PLEG): container finished" podID="cb75aca9-9973-41f0-8bf2-02a97b01f57f" containerID="1bfe36a950e35eb890eeab03f09d8d2abead59a0605cb899bc55c6b4e1ae3ff7" exitCode=0 Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.469954 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvhmg" event={"ID":"cb75aca9-9973-41f0-8bf2-02a97b01f57f","Type":"ContainerDied","Data":"1bfe36a950e35eb890eeab03f09d8d2abead59a0605cb899bc55c6b4e1ae3ff7"} Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.474769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerStarted","Data":"09fab5474cba184ef492d415851f71589b3666adf371ef1c5240d59b4f99b59a"} Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.474949 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.719010 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openstack/nova-api-0" Dec 05 19:38:26 crc kubenswrapper[4982]: I1205 19:38:26.719064 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.734654 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.226:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.735165 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.226:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.950907 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.974278 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle\") pod \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.974458 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pv2kv\" (UniqueName: \"kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv\") pod \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.974636 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts\") pod \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.974749 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data\") pod \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\" (UID: \"cb75aca9-9973-41f0-8bf2-02a97b01f57f\") " Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.977197 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.811259295 podStartE2EDuration="6.97717127s" podCreationTimestamp="2025-12-05 19:38:21 +0000 UTC" firstStartedPulling="2025-12-05 19:38:22.447529109 +0000 UTC m=+1481.329415114" lastFinishedPulling="2025-12-05 19:38:25.613441094 +0000 UTC m=+1484.495327089" observedRunningTime="2025-12-05 19:38:26.521955225 +0000 UTC m=+1485.403841230" watchObservedRunningTime="2025-12-05 19:38:27.97717127 +0000 UTC m=+1486.859057285" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.982652 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts" (OuterVolumeSpecName: "scripts") pod "cb75aca9-9973-41f0-8bf2-02a97b01f57f" (UID: "cb75aca9-9973-41f0-8bf2-02a97b01f57f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:27 crc kubenswrapper[4982]: I1205 19:38:27.983407 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv" (OuterVolumeSpecName: "kube-api-access-pv2kv") pod "cb75aca9-9973-41f0-8bf2-02a97b01f57f" (UID: "cb75aca9-9973-41f0-8bf2-02a97b01f57f"). InnerVolumeSpecName "kube-api-access-pv2kv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.014188 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb75aca9-9973-41f0-8bf2-02a97b01f57f" (UID: "cb75aca9-9973-41f0-8bf2-02a97b01f57f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.031438 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data" (OuterVolumeSpecName: "config-data") pod "cb75aca9-9973-41f0-8bf2-02a97b01f57f" (UID: "cb75aca9-9973-41f0-8bf2-02a97b01f57f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.077229 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.077262 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.077273 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pv2kv\" (UniqueName: \"kubernetes.io/projected/cb75aca9-9973-41f0-8bf2-02a97b01f57f-kube-api-access-pv2kv\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.077286 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb75aca9-9973-41f0-8bf2-02a97b01f57f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:28 crc kubenswrapper[4982]: E1205 19:38:28.416651 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1fb8006_cfc6_412d_90be_9bb828949621.slice/crio-cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017\": RecentStats: unable to find data in memory cache]" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.494901 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lvhmg" event={"ID":"cb75aca9-9973-41f0-8bf2-02a97b01f57f","Type":"ContainerDied","Data":"890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c"} Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.495228 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="890f7ceb48a670fd58f9ee995a0738d60118692444bbab9300273fd4f51b671c" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.495310 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lvhmg" Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.535193 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.535425 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-log" containerID="cri-o://b11d1b7382b2967e9292886664692c078e569976a36ef5dbf54831d3bd2b4c45" gracePeriod=30 Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.535810 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-api" containerID="cri-o://b8cd0c581a91dd6efd1b62716bf596fe34ba36e7d79f921c91c2d630a930c66c" gracePeriod=30 Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.567268 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.567505 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" containerName="nova-scheduler-scheduler" containerID="cri-o://e1d936ecf422ea6da061c7835f42755dfa25ccc2220e7fc98a3e1f0619112542" gracePeriod=30 Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.575236 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.575469 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log" containerID="cri-o://78ca6eac509d404e57d12e2cb31eb1069784a31a727f5dcb18da5cbd2f9b6196" gracePeriod=30 Dec 05 19:38:28 crc kubenswrapper[4982]: I1205 19:38:28.575572 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata" containerID="cri-o://e9f7246a7c4fa824614f5fec433980deef71c1aa55fdffefa32cee40a5cc2d9c" gracePeriod=30 Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.510514 4982 generic.go:334] "Generic (PLEG): container finished" podID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerID="78ca6eac509d404e57d12e2cb31eb1069784a31a727f5dcb18da5cbd2f9b6196" exitCode=143 Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.510583 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerDied","Data":"78ca6eac509d404e57d12e2cb31eb1069784a31a727f5dcb18da5cbd2f9b6196"} Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.512369 4982 generic.go:334] "Generic (PLEG): container finished" podID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" containerID="e1d936ecf422ea6da061c7835f42755dfa25ccc2220e7fc98a3e1f0619112542" exitCode=0 Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.512446 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"18d28c18-728c-4040-8ce6-0d8c15c3f45a","Type":"ContainerDied","Data":"e1d936ecf422ea6da061c7835f42755dfa25ccc2220e7fc98a3e1f0619112542"} Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.515489 4982 generic.go:334] "Generic (PLEG): container finished" podID="96dba84f-7287-43fc-80d4-80e9c4af787c" 
containerID="b11d1b7382b2967e9292886664692c078e569976a36ef5dbf54831d3bd2b4c45" exitCode=143 Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.515532 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerDied","Data":"b11d1b7382b2967e9292886664692c078e569976a36ef5dbf54831d3bd2b4c45"} Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.837963 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.927918 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzm2n\" (UniqueName: \"kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n\") pod \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.927991 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle\") pod \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.928107 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data\") pod \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\" (UID: \"18d28c18-728c-4040-8ce6-0d8c15c3f45a\") " Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.936350 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n" (OuterVolumeSpecName: "kube-api-access-kzm2n") pod "18d28c18-728c-4040-8ce6-0d8c15c3f45a" (UID: "18d28c18-728c-4040-8ce6-0d8c15c3f45a"). InnerVolumeSpecName "kube-api-access-kzm2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.959219 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data" (OuterVolumeSpecName: "config-data") pod "18d28c18-728c-4040-8ce6-0d8c15c3f45a" (UID: "18d28c18-728c-4040-8ce6-0d8c15c3f45a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:29 crc kubenswrapper[4982]: I1205 19:38:29.961406 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "18d28c18-728c-4040-8ce6-0d8c15c3f45a" (UID: "18d28c18-728c-4040-8ce6-0d8c15c3f45a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.030393 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzm2n\" (UniqueName: \"kubernetes.io/projected/18d28c18-728c-4040-8ce6-0d8c15c3f45a-kube-api-access-kzm2n\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.030425 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.030439 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18d28c18-728c-4040-8ce6-0d8c15c3f45a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.532653 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"18d28c18-728c-4040-8ce6-0d8c15c3f45a","Type":"ContainerDied","Data":"ec95762bf50e9eed878ef178b436be101145e01370dc233811c9fbd29a5a24a4"} Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.533013 4982 scope.go:117] "RemoveContainer" containerID="e1d936ecf422ea6da061c7835f42755dfa25ccc2220e7fc98a3e1f0619112542" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.532765 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.615104 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.649228 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.670981 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:30 crc kubenswrapper[4982]: E1205 19:38:30.671452 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb75aca9-9973-41f0-8bf2-02a97b01f57f" containerName="nova-manage" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.671464 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb75aca9-9973-41f0-8bf2-02a97b01f57f" containerName="nova-manage" Dec 05 19:38:30 crc kubenswrapper[4982]: E1205 19:38:30.671474 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" containerName="nova-scheduler-scheduler" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.671481 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" containerName="nova-scheduler-scheduler" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.671718 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb75aca9-9973-41f0-8bf2-02a97b01f57f" containerName="nova-manage" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.671737 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" containerName="nova-scheduler-scheduler" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.672429 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.675360 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.690299 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.743454 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.743505 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s975n\" (UniqueName: \"kubernetes.io/projected/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-kube-api-access-s975n\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.743703 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.844620 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.844701 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.844727 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s975n\" (UniqueName: \"kubernetes.io/projected/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-kube-api-access-s975n\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.850039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.859307 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-config-data\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:30 crc kubenswrapper[4982]: I1205 19:38:30.863084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s975n\" (UniqueName: 
\"kubernetes.io/projected/c2cfe605-218c-442d-b88b-8f2d7b3a6ba0-kube-api-access-s975n\") pod \"nova-scheduler-0\" (UID: \"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0\") " pod="openstack/nova-scheduler-0" Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.000570 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.427112 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18d28c18-728c-4040-8ce6-0d8c15c3f45a" path="/var/lib/kubelet/pods/18d28c18-728c-4040-8ce6-0d8c15c3f45a/volumes" Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.501597 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 19:38:31 crc kubenswrapper[4982]: W1205 19:38:31.505764 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2cfe605_218c_442d_b88b_8f2d7b3a6ba0.slice/crio-888129c5b1d87ab9255f66ef53fc113adbc4d72aed497122bf7c7f798601a73e WatchSource:0}: Error finding container 888129c5b1d87ab9255f66ef53fc113adbc4d72aed497122bf7c7f798601a73e: Status 404 returned error can't find the container with id 888129c5b1d87ab9255f66ef53fc113adbc4d72aed497122bf7c7f798601a73e Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.543022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0","Type":"ContainerStarted","Data":"888129c5b1d87ab9255f66ef53fc113adbc4d72aed497122bf7c7f798601a73e"} Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.713413 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": read tcp 10.217.0.2:50976->10.217.0.221:8775: read: connection reset by peer" Dec 05 19:38:31 crc kubenswrapper[4982]: I1205 19:38:31.713438 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.221:8775/\": read tcp 10.217.0.2:50962->10.217.0.221:8775: read: connection reset by peer" Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.554629 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c2cfe605-218c-442d-b88b-8f2d7b3a6ba0","Type":"ContainerStarted","Data":"be0511db3e4efd5eaec9808296df6896cf2f978f4ec78cbbd07cff24e4a10b06"} Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.558730 4982 generic.go:334] "Generic (PLEG): container finished" podID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerID="e9f7246a7c4fa824614f5fec433980deef71c1aa55fdffefa32cee40a5cc2d9c" exitCode=0 Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.558774 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerDied","Data":"e9f7246a7c4fa824614f5fec433980deef71c1aa55fdffefa32cee40a5cc2d9c"} Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.577100 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.577080354 podStartE2EDuration="2.577080354s" podCreationTimestamp="2025-12-05 19:38:30 +0000 UTC" firstStartedPulling="0001-01-01 
00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:32.569597875 +0000 UTC m=+1491.451483880" watchObservedRunningTime="2025-12-05 19:38:32.577080354 +0000 UTC m=+1491.458966349" Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.858222 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.985526 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data\") pod \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.985852 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mrb9\" (UniqueName: \"kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9\") pod \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.985920 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs\") pod \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.986011 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs\") pod \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.986108 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle\") pod \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\" (UID: \"e4dd66bd-d882-4475-9eb3-eb70e9081b59\") " Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.987216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs" (OuterVolumeSpecName: "logs") pod "e4dd66bd-d882-4475-9eb3-eb70e9081b59" (UID: "e4dd66bd-d882-4475-9eb3-eb70e9081b59"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.987476 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e4dd66bd-d882-4475-9eb3-eb70e9081b59-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:32 crc kubenswrapper[4982]: I1205 19:38:32.996519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9" (OuterVolumeSpecName: "kube-api-access-6mrb9") pod "e4dd66bd-d882-4475-9eb3-eb70e9081b59" (UID: "e4dd66bd-d882-4475-9eb3-eb70e9081b59"). InnerVolumeSpecName "kube-api-access-6mrb9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.041263 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4dd66bd-d882-4475-9eb3-eb70e9081b59" (UID: "e4dd66bd-d882-4475-9eb3-eb70e9081b59"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.043304 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data" (OuterVolumeSpecName: "config-data") pod "e4dd66bd-d882-4475-9eb3-eb70e9081b59" (UID: "e4dd66bd-d882-4475-9eb3-eb70e9081b59"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.072593 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "e4dd66bd-d882-4475-9eb3-eb70e9081b59" (UID: "e4dd66bd-d882-4475-9eb3-eb70e9081b59"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.090047 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.090092 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mrb9\" (UniqueName: \"kubernetes.io/projected/e4dd66bd-d882-4475-9eb3-eb70e9081b59-kube-api-access-6mrb9\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.090109 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.090126 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4dd66bd-d882-4475-9eb3-eb70e9081b59-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.576376 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e4dd66bd-d882-4475-9eb3-eb70e9081b59","Type":"ContainerDied","Data":"e4d603032006db6e8962a27c7f11427b9101805861e6663eb52f4717b27923ff"} Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.576733 4982 scope.go:117] "RemoveContainer" containerID="e9f7246a7c4fa824614f5fec433980deef71c1aa55fdffefa32cee40a5cc2d9c" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.576898 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.583107 4982 generic.go:334] "Generic (PLEG): container finished" podID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerID="b8cd0c581a91dd6efd1b62716bf596fe34ba36e7d79f921c91c2d630a930c66c" exitCode=0 Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.585294 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerDied","Data":"b8cd0c581a91dd6efd1b62716bf596fe34ba36e7d79f921c91c2d630a930c66c"} Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.585359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"96dba84f-7287-43fc-80d4-80e9c4af787c","Type":"ContainerDied","Data":"5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a"} Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.585371 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e1756b98ab9c9cf4230db349974c4983a2dd014c9de527738ac4aa6cdfe045a" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.666194 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.676862 4982 scope.go:117] "RemoveContainer" containerID="78ca6eac509d404e57d12e2cb31eb1069784a31a727f5dcb18da5cbd2f9b6196" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.692613 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.706071 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.732889 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 19:38:33 crc kubenswrapper[4982]: E1205 19:38:33.733362 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-log" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733374 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-log" Dec 05 19:38:33 crc kubenswrapper[4982]: E1205 19:38:33.733388 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733394 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log" Dec 05 19:38:33 crc kubenswrapper[4982]: E1205 19:38:33.733410 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733416 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata" Dec 05 19:38:33 crc kubenswrapper[4982]: E1205 19:38:33.733427 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-api" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733432 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-api" Dec 05 19:38:33 crc kubenswrapper[4982]: 
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733652 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-api"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733681 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-log"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733693 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" containerName="nova-api-log"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.733703 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" containerName="nova-metadata-metadata"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.734896 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.737114 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.737405 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.745275 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.807555 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.807831 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808384 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808485 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808527 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7wpbc\" (UniqueName: \"kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808568 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs\") pod \"96dba84f-7287-43fc-80d4-80e9c4af787c\" (UID: \"96dba84f-7287-43fc-80d4-80e9c4af787c\") "
\"96dba84f-7287-43fc-80d4-80e9c4af787c\") " Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808794 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808858 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbth9\" (UniqueName: \"kubernetes.io/projected/a196cf68-e14d-4171-90f5-a266a8313f72-kube-api-access-sbth9\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808909 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a196cf68-e14d-4171-90f5-a266a8313f72-logs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808961 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-config-data\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.808982 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.809209 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs" (OuterVolumeSpecName: "logs") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.812697 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc" (OuterVolumeSpecName: "kube-api-access-7wpbc") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "kube-api-access-7wpbc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.839454 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data" (OuterVolumeSpecName: "config-data") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.843202 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.862259 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.866847 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "96dba84f-7287-43fc-80d4-80e9c4af787c" (UID: "96dba84f-7287-43fc-80d4-80e9c4af787c"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.910759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a196cf68-e14d-4171-90f5-a266a8313f72-logs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.910832 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-config-data\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.910857 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.910939 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.910991 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbth9\" (UniqueName: \"kubernetes.io/projected/a196cf68-e14d-4171-90f5-a266a8313f72-kube-api-access-sbth9\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911056 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911068 4982 
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911068 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911077 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911085 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/96dba84f-7287-43fc-80d4-80e9c4af787c-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911093 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7wpbc\" (UniqueName: \"kubernetes.io/projected/96dba84f-7287-43fc-80d4-80e9c4af787c-kube-api-access-7wpbc\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911103 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/96dba84f-7287-43fc-80d4-80e9c4af787c-logs\") on node \"crc\" DevicePath \"\""
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.911765 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a196cf68-e14d-4171-90f5-a266a8313f72-logs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.914652 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.914869 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-config-data\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.916665 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a196cf68-e14d-4171-90f5-a266a8313f72-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0"
Dec 05 19:38:33 crc kubenswrapper[4982]: I1205 19:38:33.926088 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbth9\" (UniqueName: \"kubernetes.io/projected/a196cf68-e14d-4171-90f5-a266a8313f72-kube-api-access-sbth9\") pod \"nova-metadata-0\" (UID: \"a196cf68-e14d-4171-90f5-a266a8313f72\") " pod="openstack/nova-metadata-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.054485 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.528809 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.602874 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a196cf68-e14d-4171-90f5-a266a8313f72","Type":"ContainerStarted","Data":"4fa8316bfc114f474020b812bd198a15bc641899f924bcc512404e5e48e29c9a"}
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.604636 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.682685 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.694815 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.705854 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.708089 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.710195 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.711219 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.711722 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.718799 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829656 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-config-data\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829693 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-public-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829760 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829784 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eaf71bf8-6e74-4fec-a151-372e1044b69f-logs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0"
Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829858 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0"
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.829881 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m7nlm\" (UniqueName: \"kubernetes.io/projected/eaf71bf8-6e74-4fec-a151-372e1044b69f-kube-api-access-m7nlm\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932069 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-config-data\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932105 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-public-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932196 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932223 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eaf71bf8-6e74-4fec-a151-372e1044b69f-logs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932307 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.932331 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m7nlm\" (UniqueName: \"kubernetes.io/projected/eaf71bf8-6e74-4fec-a151-372e1044b69f-kube-api-access-m7nlm\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.933019 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eaf71bf8-6e74-4fec-a151-372e1044b69f-logs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.935526 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-config-data\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.936239 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.936639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-public-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.937580 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/eaf71bf8-6e74-4fec-a151-372e1044b69f-internal-tls-certs\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:34 crc kubenswrapper[4982]: I1205 19:38:34.949096 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m7nlm\" (UniqueName: \"kubernetes.io/projected/eaf71bf8-6e74-4fec-a151-372e1044b69f-kube-api-access-m7nlm\") pod \"nova-api-0\" (UID: \"eaf71bf8-6e74-4fec-a151-372e1044b69f\") " pod="openstack/nova-api-0" Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.043202 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.405401 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96dba84f-7287-43fc-80d4-80e9c4af787c" path="/var/lib/kubelet/pods/96dba84f-7287-43fc-80d4-80e9c4af787c/volumes" Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.406401 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4dd66bd-d882-4475-9eb3-eb70e9081b59" path="/var/lib/kubelet/pods/e4dd66bd-d882-4475-9eb3-eb70e9081b59/volumes" Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.529236 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 19:38:35 crc kubenswrapper[4982]: W1205 19:38:35.530884 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeaf71bf8_6e74_4fec_a151_372e1044b69f.slice/crio-6967c84f18599590fe84d3c5364cece87db128739727057992feb10ff32c73cf WatchSource:0}: Error finding container 6967c84f18599590fe84d3c5364cece87db128739727057992feb10ff32c73cf: Status 404 returned error can't find the container with id 6967c84f18599590fe84d3c5364cece87db128739727057992feb10ff32c73cf Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.617641 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a196cf68-e14d-4171-90f5-a266a8313f72","Type":"ContainerStarted","Data":"6c02317b2d3d0f5c092ae8fb40fcdb4cae0f8e529a46c5b39290d60588639cf6"} Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.617999 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a196cf68-e14d-4171-90f5-a266a8313f72","Type":"ContainerStarted","Data":"e2cfc1194a4b3b4f2b6381ac5cc5333114206288ef9d58021322c98d73b654d4"} Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.620597 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eaf71bf8-6e74-4fec-a151-372e1044b69f","Type":"ContainerStarted","Data":"6967c84f18599590fe84d3c5364cece87db128739727057992feb10ff32c73cf"} Dec 05 19:38:35 crc kubenswrapper[4982]: I1205 19:38:35.637271 4982 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.637254741 podStartE2EDuration="2.637254741s" podCreationTimestamp="2025-12-05 19:38:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:35.635536957 +0000 UTC m=+1494.517422962" watchObservedRunningTime="2025-12-05 19:38:35.637254741 +0000 UTC m=+1494.519140746" Dec 05 19:38:36 crc kubenswrapper[4982]: I1205 19:38:36.000921 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 19:38:36 crc kubenswrapper[4982]: I1205 19:38:36.633587 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eaf71bf8-6e74-4fec-a151-372e1044b69f","Type":"ContainerStarted","Data":"dcdab91044d608b8295bab88f230d82793e8f58619f919b177202862492ccca4"} Dec 05 19:38:36 crc kubenswrapper[4982]: I1205 19:38:36.633972 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eaf71bf8-6e74-4fec-a151-372e1044b69f","Type":"ContainerStarted","Data":"251d48cbf1dffa83fd3f945b354b6423b589eeb35674040260c5fec8d69196c0"} Dec 05 19:38:36 crc kubenswrapper[4982]: I1205 19:38:36.666326 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.666302595 podStartE2EDuration="2.666302595s" podCreationTimestamp="2025-12-05 19:38:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:38:36.65379884 +0000 UTC m=+1495.535684895" watchObservedRunningTime="2025-12-05 19:38:36.666302595 +0000 UTC m=+1495.548188600" Dec 05 19:38:38 crc kubenswrapper[4982]: E1205 19:38:38.760393 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1fb8006_cfc6_412d_90be_9bb828949621.slice/crio-cc1cfd00a28206cb512c50ac8a0a398520cbcfa35d8a233a092fc8b9518ee017\": RecentStats: unable to find data in memory cache]" Dec 05 19:38:39 crc kubenswrapper[4982]: I1205 19:38:39.054833 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 19:38:39 crc kubenswrapper[4982]: I1205 19:38:39.054885 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 19:38:41 crc kubenswrapper[4982]: I1205 19:38:41.001500 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 19:38:41 crc kubenswrapper[4982]: I1205 19:38:41.042682 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 19:38:41 crc kubenswrapper[4982]: I1205 19:38:41.726921 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 19:38:44 crc kubenswrapper[4982]: I1205 19:38:44.055521 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 19:38:44 crc kubenswrapper[4982]: I1205 19:38:44.055936 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 19:38:45 crc kubenswrapper[4982]: I1205 19:38:45.044278 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-api-0" Dec 05 19:38:45 crc kubenswrapper[4982]: I1205 19:38:45.045652 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 19:38:45 crc kubenswrapper[4982]: I1205 19:38:45.075376 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a196cf68-e14d-4171-90f5-a266a8313f72" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:45 crc kubenswrapper[4982]: I1205 19:38:45.075389 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a196cf68-e14d-4171-90f5-a266a8313f72" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.230:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:46 crc kubenswrapper[4982]: I1205 19:38:46.056288 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eaf71bf8-6e74-4fec-a151-372e1044b69f" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:46 crc kubenswrapper[4982]: I1205 19:38:46.056367 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eaf71bf8-6e74-4fec-a151-372e1044b69f" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.231:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 19:38:51 crc kubenswrapper[4982]: I1205 19:38:51.900437 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 19:38:54 crc kubenswrapper[4982]: I1205 19:38:54.060982 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 19:38:54 crc kubenswrapper[4982]: I1205 19:38:54.067946 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 19:38:54 crc kubenswrapper[4982]: I1205 19:38:54.069225 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 19:38:54 crc kubenswrapper[4982]: I1205 19:38:54.832854 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.067793 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.068374 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.070440 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.082079 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.839026 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 19:38:55 crc kubenswrapper[4982]: I1205 19:38:55.850597 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.062751 4982 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-hf7s9"] Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.073525 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-hf7s9"] Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.177293 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-np8hc"] Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.178866 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.181713 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.191210 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-np8hc"] Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.329557 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.329632 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.329665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c446r\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.329706 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.329803 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.432123 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.432311 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data\") pod \"cloudkitty-db-sync-np8hc\" (UID: 
\"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.432412 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.433194 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.433255 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c446r\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.438924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.439077 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.440049 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.441611 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.449597 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c446r\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r\") pod \"cloudkitty-db-sync-np8hc\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:06 crc kubenswrapper[4982]: I1205 19:39:06.509289 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.034501 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-np8hc"] Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.415932 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58e0a579-42f9-40b8-a0b4-13902c0fe8c7" path="/var/lib/kubelet/pods/58e0a579-42f9-40b8-a0b4-13902c0fe8c7/volumes" Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.964198 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.964524 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-central-agent" containerID="cri-o://7eede677cdcc059cd5db393561740c918e76032841a0251a6610285e808b196a" gracePeriod=30 Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.964644 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="sg-core" containerID="cri-o://38c72eabc49fd754ec0f939bc8a7e7ad84405016ca4e32ca4fa3994bba7e639f" gracePeriod=30 Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.964756 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-notification-agent" containerID="cri-o://b59ac17d2b43acb76e9c01473bc60c78edae411feffc0257a29777bb3d2fe10f" gracePeriod=30 Dec 05 19:39:07 crc kubenswrapper[4982]: I1205 19:39:07.964814 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="proxy-httpd" containerID="cri-o://09fab5474cba184ef492d415851f71589b3666adf371ef1c5240d59b4f99b59a" gracePeriod=30 Dec 05 19:39:08 crc kubenswrapper[4982]: I1205 19:39:08.003144 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-np8hc" event={"ID":"ac18c894-f60b-4db2-80a2-82f23f52f9a2","Type":"ContainerStarted","Data":"2a7287883ddb3d70c850f7988bdd66d98bbb785b86b35c6de1c58667e800a65a"} Dec 05 19:39:08 crc kubenswrapper[4982]: I1205 19:39:08.385905 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017548 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerID="09fab5474cba184ef492d415851f71589b3666adf371ef1c5240d59b4f99b59a" exitCode=0 Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017580 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerID="38c72eabc49fd754ec0f939bc8a7e7ad84405016ca4e32ca4fa3994bba7e639f" exitCode=2 Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017589 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerID="7eede677cdcc059cd5db393561740c918e76032841a0251a6610285e808b196a" exitCode=0 Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017598 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerDied","Data":"09fab5474cba184ef492d415851f71589b3666adf371ef1c5240d59b4f99b59a"} Dec 
05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017642 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerDied","Data":"38c72eabc49fd754ec0f939bc8a7e7ad84405016ca4e32ca4fa3994bba7e639f"} Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.017759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerDied","Data":"7eede677cdcc059cd5db393561740c918e76032841a0251a6610285e808b196a"} Dec 05 19:39:09 crc kubenswrapper[4982]: I1205 19:39:09.208188 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:39:12 crc kubenswrapper[4982]: I1205 19:39:12.055644 4982 generic.go:334] "Generic (PLEG): container finished" podID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerID="b59ac17d2b43acb76e9c01473bc60c78edae411feffc0257a29777bb3d2fe10f" exitCode=0 Dec 05 19:39:12 crc kubenswrapper[4982]: I1205 19:39:12.055723 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerDied","Data":"b59ac17d2b43acb76e9c01473bc60c78edae411feffc0257a29777bb3d2fe10f"} Dec 05 19:39:12 crc kubenswrapper[4982]: I1205 19:39:12.908959 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:39:12 crc kubenswrapper[4982]: I1205 19:39:12.944442 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" containerID="cri-o://1e27f3b142b77fb968498cdcc70ca2fef5015bc03c13dbb0ea0b79d0063bf8f5" gracePeriod=604796 Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.059939 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.060002 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.060850 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.060891 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnrln\" (UniqueName: \"kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.060931 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: 
\"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.060967 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.061075 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.061128 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle\") pod \"4f253ee0-dab6-43ac-83b0-9d6271963e99\" (UID: \"4f253ee0-dab6-43ac-83b0-9d6271963e99\") " Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.061651 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.062141 4982 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.062437 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.069402 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts" (OuterVolumeSpecName: "scripts") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.073458 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln" (OuterVolumeSpecName: "kube-api-access-rnrln") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "kube-api-access-rnrln". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.078953 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"4f253ee0-dab6-43ac-83b0-9d6271963e99","Type":"ContainerDied","Data":"4463aa2549f3abd75279214ba42ffcb074d16bfc710351b018209f030f7e93c8"} Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.079037 4982 scope.go:117] "RemoveContainer" containerID="09fab5474cba184ef492d415851f71589b3666adf371ef1c5240d59b4f99b59a" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.079339 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.128433 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.165230 4982 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/4f253ee0-dab6-43ac-83b0-9d6271963e99-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.165287 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.165299 4982 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.165315 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnrln\" (UniqueName: \"kubernetes.io/projected/4f253ee0-dab6-43ac-83b0-9d6271963e99-kube-api-access-rnrln\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.169439 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.182586 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.231115 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data" (OuterVolumeSpecName: "config-data") pod "4f253ee0-dab6-43ac-83b0-9d6271963e99" (UID: "4f253ee0-dab6-43ac-83b0-9d6271963e99"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.259469 4982 scope.go:117] "RemoveContainer" containerID="38c72eabc49fd754ec0f939bc8a7e7ad84405016ca4e32ca4fa3994bba7e639f" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.266749 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.266774 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.266784 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4f253ee0-dab6-43ac-83b0-9d6271963e99-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.278652 4982 scope.go:117] "RemoveContainer" containerID="b59ac17d2b43acb76e9c01473bc60c78edae411feffc0257a29777bb3d2fe10f" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.301949 4982 scope.go:117] "RemoveContainer" containerID="7eede677cdcc059cd5db393561740c918e76032841a0251a6610285e808b196a" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.437514 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.453464 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.469847 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:13 crc kubenswrapper[4982]: E1205 19:39:13.470422 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="sg-core" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470444 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="sg-core" Dec 05 19:39:13 crc kubenswrapper[4982]: E1205 19:39:13.470466 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="proxy-httpd" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470474 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="proxy-httpd" Dec 05 19:39:13 crc kubenswrapper[4982]: E1205 19:39:13.470504 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-central-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470513 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-central-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: E1205 19:39:13.470554 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-notification-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470561 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-notification-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470795 4982 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="sg-core" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470838 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-central-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470850 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="proxy-httpd" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.470858 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" containerName="ceilometer-notification-agent" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.473337 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.475168 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.479072 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.479409 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.487438 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.576849 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-run-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.576906 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5jv8\" (UniqueName: \"kubernetes.io/projected/fdfbdc81-b68b-414e-af43-2f44719ca203-kube-api-access-s5jv8\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.576936 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-scripts\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.576975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-config-data\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.577007 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-log-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.577023 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.577061 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.577127 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.678839 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-log-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.678877 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.678933 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.679009 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.679041 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-run-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.679073 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5jv8\" (UniqueName: \"kubernetes.io/projected/fdfbdc81-b68b-414e-af43-2f44719ca203-kube-api-access-s5jv8\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.679101 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-scripts\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.679156 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-config-data\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.680346 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-log-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.680547 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fdfbdc81-b68b-414e-af43-2f44719ca203-run-httpd\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.683290 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.683376 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-config-data\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.685498 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.685764 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-scripts\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.685924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fdfbdc81-b68b-414e-af43-2f44719ca203-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.698816 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5jv8\" (UniqueName: \"kubernetes.io/projected/fdfbdc81-b68b-414e-af43-2f44719ca203-kube-api-access-s5jv8\") pod \"ceilometer-0\" (UID: \"fdfbdc81-b68b-414e-af43-2f44719ca203\") " pod="openstack/ceilometer-0" Dec 05 19:39:13 crc kubenswrapper[4982]: I1205 19:39:13.805351 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 19:39:14 crc kubenswrapper[4982]: I1205 19:39:14.298297 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 19:39:14 crc kubenswrapper[4982]: I1205 19:39:14.565928 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" containerID="cri-o://6eb462887ee90c252433b9506716b48c1e28067938a942de21fc95c9dec55568" gracePeriod=604795 Dec 05 19:39:15 crc kubenswrapper[4982]: I1205 19:39:15.406031 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f253ee0-dab6-43ac-83b0-9d6271963e99" path="/var/lib/kubelet/pods/4f253ee0-dab6-43ac-83b0-9d6271963e99/volumes" Dec 05 19:39:17 crc kubenswrapper[4982]: I1205 19:39:17.516643 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: connect: connection refused" Dec 05 19:39:17 crc kubenswrapper[4982]: I1205 19:39:17.633701 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: connect: connection refused" Dec 05 19:39:19 crc kubenswrapper[4982]: I1205 19:39:19.154877 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fdfbdc81-b68b-414e-af43-2f44719ca203","Type":"ContainerStarted","Data":"184925e9b049a5b33ff58bf2a0d38997c359f4f25fde13d1b820d4c89358ada8"} Dec 05 19:39:20 crc kubenswrapper[4982]: I1205 19:39:20.169957 4982 generic.go:334] "Generic (PLEG): container finished" podID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerID="1e27f3b142b77fb968498cdcc70ca2fef5015bc03c13dbb0ea0b79d0063bf8f5" exitCode=0 Dec 05 19:39:20 crc kubenswrapper[4982]: I1205 19:39:20.170354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerDied","Data":"1e27f3b142b77fb968498cdcc70ca2fef5015bc03c13dbb0ea0b79d0063bf8f5"} Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.195979 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.201339 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.207483 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.223471 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272619 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8kdt\" (UniqueName: \"kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272678 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272725 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272814 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272876 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.272923 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376485 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0\") pod 
\"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376623 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376648 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8kdt\" (UniqueName: \"kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376677 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376716 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.376760 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.377555 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.377952 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.378432 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " 
pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.378523 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.378612 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.378733 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.410235 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8kdt\" (UniqueName: \"kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt\") pod \"dnsmasq-dns-595979776c-p5xw2\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:22 crc kubenswrapper[4982]: I1205 19:39:22.542128 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:24 crc kubenswrapper[4982]: I1205 19:39:24.236110 4982 generic.go:334] "Generic (PLEG): container finished" podID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerID="6eb462887ee90c252433b9506716b48c1e28067938a942de21fc95c9dec55568" exitCode=0 Dec 05 19:39:24 crc kubenswrapper[4982]: I1205 19:39:24.236632 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerDied","Data":"6eb462887ee90c252433b9506716b48c1e28067938a942de21fc95c9dec55568"} Dec 05 19:39:28 crc kubenswrapper[4982]: E1205 19:39:28.645569 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 05 19:39:28 crc kubenswrapper[4982]: E1205 19:39:28.646163 4982 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 05 19:39:28 crc kubenswrapper[4982]: E1205 19:39:28.646299 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c446r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-np8hc_openstack(ac18c894-f60b-4db2-80a2-82f23f52f9a2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 19:39:28 crc kubenswrapper[4982]: E1205 19:39:28.647487 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-np8hc" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.745167 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.762203 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809329 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809381 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809408 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809437 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809462 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56k8j\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809490 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809529 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809558 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809617 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809708 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf\") pod 
\"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809762 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809796 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809822 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809850 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809871 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809910 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809938 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffm7r\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.809961 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.813459 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"145ed592-ad9f-487f-940e-71b78c2f48e1\" (UID: \"145ed592-ad9f-487f-940e-71b78c2f48e1\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.819379 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.819470 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.819553 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf\") pod \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\" (UID: \"2c4d593c-5baa-4b09-b586-7b0e65acaa73\") " Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.821190 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.830705 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.832438 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.833886 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.834097 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.835203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.849609 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j" (OuterVolumeSpecName: "kube-api-access-56k8j") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "kube-api-access-56k8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.854926 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info" (OuterVolumeSpecName: "pod-info") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.859819 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r" (OuterVolumeSpecName: "kube-api-access-ffm7r") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "kube-api-access-ffm7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.898244 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.899003 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.921261 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info" (OuterVolumeSpecName: "pod-info") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925098 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925143 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925175 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925188 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56k8j\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-kube-api-access-56k8j\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925201 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925213 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/145ed592-ad9f-487f-940e-71b78c2f48e1-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925225 4982 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925235 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925246 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925258 4982 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2c4d593c-5baa-4b09-b586-7b0e65acaa73-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925269 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/145ed592-ad9f-487f-940e-71b78c2f48e1-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.925282 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffm7r\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-kube-api-access-ffm7r\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.936475 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls" 
(OuterVolumeSpecName: "rabbitmq-tls") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:28 crc kubenswrapper[4982]: I1205 19:39:28.953606 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.014216 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data" (OuterVolumeSpecName: "config-data") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.028799 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.028825 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.028838 4982 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2c4d593c-5baa-4b09-b586-7b0e65acaa73-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.033051 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data" (OuterVolumeSpecName: "config-data") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.115372 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf" (OuterVolumeSpecName: "server-conf") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.137761 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.137809 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/145ed592-ad9f-487f-940e-71b78c2f48e1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.150286 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf" (OuterVolumeSpecName: "server-conf") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.278029 4982 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2c4d593c-5baa-4b09-b586-7b0e65acaa73-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.278802 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7" (OuterVolumeSpecName: "persistence") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.286324 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a" (OuterVolumeSpecName: "persistence") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.297242 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "2c4d593c-5baa-4b09-b586-7b0e65acaa73" (UID: "2c4d593c-5baa-4b09-b586-7b0e65acaa73"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.298696 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "145ed592-ad9f-487f-940e-71b78c2f48e1" (UID: "145ed592-ad9f-487f-940e-71b78c2f48e1"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.305434 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.381068 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2c4d593c-5baa-4b09-b586-7b0e65acaa73-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.381107 4982 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/145ed592-ad9f-487f-940e-71b78c2f48e1-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.381172 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") on node \"crc\" " Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.381194 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") on node \"crc\" " Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.416135 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.416330 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7") on node "crc" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.424385 4982 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.424518 4982 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a") on node "crc" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.493502 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.493556 4982 reconciler_common.go:293] "Volume detached for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.537577 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2c4d593c-5baa-4b09-b586-7b0e65acaa73","Type":"ContainerDied","Data":"a995524ac63ebe70eb959623614c175aff9662908b8bcf494be06f10171cfe25"} Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.537636 4982 scope.go:117] "RemoveContainer" containerID="6eb462887ee90c252433b9506716b48c1e28067938a942de21fc95c9dec55568" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.537644 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.543278 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"145ed592-ad9f-487f-940e-71b78c2f48e1","Type":"ContainerDied","Data":"8d1c136a289e7af021d0626c8846095fb795aff874ece0f753d303cefc2aa483"} Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.543396 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.548437 4982 generic.go:334] "Generic (PLEG): container finished" podID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerID="02ebd60ae616a6b94a4c54273c6b6a4f0ac434a6fe75ebba7ef5e8672597f408" exitCode=0 Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.549331 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-p5xw2" event={"ID":"132d8d85-1260-4e88-b8f2-1f7dfe748a08","Type":"ContainerDied","Data":"02ebd60ae616a6b94a4c54273c6b6a4f0ac434a6fe75ebba7ef5e8672597f408"} Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.549387 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-p5xw2" event={"ID":"132d8d85-1260-4e88-b8f2-1f7dfe748a08","Type":"ContainerStarted","Data":"60694694809f38c0d72fdabbb1abad2241bbd6baabe7bf65db125ad656f06791"} Dec 05 19:39:29 crc kubenswrapper[4982]: E1205 19:39:29.551538 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested\\\"\"" pod="openstack/cloudkitty-db-sync-np8hc" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.583685 4982 scope.go:117] "RemoveContainer" containerID="e0e667bb7326f9117e021442929b3a4f8127da2964721e14c6fe962fcb1c5cf5" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.622010 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.647635 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.654903 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.682882 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.704509 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: E1205 19:39:29.705302 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705316 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: E1205 19:39:29.705332 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="setup-container" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705338 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="setup-container" Dec 05 19:39:29 crc kubenswrapper[4982]: E1205 19:39:29.705355 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705360 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: E1205 19:39:29.705376 4982 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="setup-container" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705381 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="setup-container" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705590 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.705612 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.706899 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.710403 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.710612 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.713204 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.713340 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.713542 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.713711 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-5c7gt" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.715019 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.739887 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.743178 4982 scope.go:117] "RemoveContainer" containerID="1e27f3b142b77fb968498cdcc70ca2fef5015bc03c13dbb0ea0b79d0063bf8f5" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.745592 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.747368 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.749959 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.750374 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-jg4zm" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.750609 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.750768 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.750884 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.753076 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.754482 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.768291 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803703 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j8vm\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-kube-api-access-9j8vm\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803755 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-config-data\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 
19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803828 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803869 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9c1e005-1e95-440f-be18-77dbe6a757db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803887 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803909 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803941 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.803967 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9c1e005-1e95-440f-be18-77dbe6a757db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.822345 4982 scope.go:117] "RemoveContainer" containerID="25bd919fb6d1b5d277f9956e642210082f9f9a7875b1f375e7baa0bd19ce6bf9" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906096 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906220 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906252 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e38c99e0-3c00-4474-9a4e-b388a5630685-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906300 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906366 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgrxk\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-kube-api-access-lgrxk\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906428 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9c1e005-1e95-440f-be18-77dbe6a757db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906458 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906493 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906531 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906560 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9c1e005-1e95-440f-be18-77dbe6a757db-pod-info\") pod 
\"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906595 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906623 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e38c99e0-3c00-4474-9a4e-b388a5630685-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906686 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906709 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906728 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906749 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j8vm\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-kube-api-access-9j8vm\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906772 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906795 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-config-data\") pod \"rabbitmq-server-0\" (UID: 
\"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.906816 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.907788 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.908020 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.908456 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-server-conf\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.908792 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d9c1e005-1e95-440f-be18-77dbe6a757db-config-data\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.911843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.912076 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d9c1e005-1e95-440f-be18-77dbe6a757db-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.912447 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d9c1e005-1e95-440f-be18-77dbe6a757db-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.912577 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0" Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.918950 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.918991 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/e6ead355b67baeed78bccda2d66d28a6193337f668634f8727812ed92695a9b2/globalmount\"" pod="openstack/rabbitmq-server-0"
Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.919518 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.934942 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j8vm\" (UniqueName: \"kubernetes.io/projected/d9c1e005-1e95-440f-be18-77dbe6a757db-kube-api-access-9j8vm\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:39:29 crc kubenswrapper[4982]: I1205 19:39:29.994590 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-c4f451ff-1cc3-43f2-8a48-4364b772cc0a\") pod \"rabbitmq-server-0\" (UID: \"d9c1e005-1e95-440f-be18-77dbe6a757db\") " pod="openstack/rabbitmq-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008050 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008093 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008126 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e38c99e0-3c00-4474-9a4e-b388a5630685-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008166 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008191 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008276 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008297 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008327 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e38c99e0-3c00-4474-9a4e-b388a5630685-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008391 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008411 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgrxk\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-kube-api-access-lgrxk\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.008670 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.009847 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.011785 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.012356 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/e38c99e0-3c00-4474-9a4e-b388a5630685-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.012636 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.014401 4982 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.014434 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/ff9c192ce0064b767547340d30e9eff06237e32ce1f9d3aedb8d855c64d41efa/globalmount\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.014925 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/e38c99e0-3c00-4474-9a4e-b388a5630685-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.016068 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.018006 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.018196 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/e38c99e0-3c00-4474-9a4e-b388a5630685-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.029499 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgrxk\" (UniqueName: \"kubernetes.io/projected/e38c99e0-3c00-4474-9a4e-b388a5630685-kube-api-access-lgrxk\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.070010 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ff3509c7-c37f-4e3a-be38-664a11859ee7\") pod \"rabbitmq-cell1-server-0\" (UID: \"e38c99e0-3c00-4474-9a4e-b388a5630685\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.128539 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.199960 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.562161 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-p5xw2" event={"ID":"132d8d85-1260-4e88-b8f2-1f7dfe748a08","Type":"ContainerStarted","Data":"2cdd693e3c1595bda06629d932c1bc41fbff71843df46087c009be7999a3a6d4"}
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.562360 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-595979776c-p5xw2"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.591746 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-595979776c-p5xw2" podStartSLOduration=8.591732167 podStartE2EDuration="8.591732167s" podCreationTimestamp="2025-12-05 19:39:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:39:30.5914699 +0000 UTC m=+1549.473355885" watchObservedRunningTime="2025-12-05 19:39:30.591732167 +0000 UTC m=+1549.473618162"
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.665429 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 19:39:30 crc kubenswrapper[4982]: I1205 19:39:30.774604 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 19:39:31 crc kubenswrapper[4982]: I1205 19:39:31.404794 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" path="/var/lib/kubelet/pods/145ed592-ad9f-487f-940e-71b78c2f48e1/volumes"
Dec 05 19:39:31 crc kubenswrapper[4982]: I1205 19:39:31.406237 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" path="/var/lib/kubelet/pods/2c4d593c-5baa-4b09-b586-7b0e65acaa73/volumes"
Dec 05 19:39:31 crc kubenswrapper[4982]: I1205 19:39:31.574135 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e38c99e0-3c00-4474-9a4e-b388a5630685","Type":"ContainerStarted","Data":"caab92e37044a3be37a4cdb1879ed1aaad6647f65cdb9564bedb8fe5c529fdf8"}
Dec 05 19:39:31 crc kubenswrapper[4982]: I1205 19:39:31.576342 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d9c1e005-1e95-440f-be18-77dbe6a757db","Type":"ContainerStarted","Data":"77b78c2ba3d8196999e6c2358a9d8be0953aa0a8fe93af05aa093eab275329a8"}
Dec 05 19:39:32 crc kubenswrapper[4982]: I1205 19:39:32.517473 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="145ed592-ad9f-487f-940e-71b78c2f48e1" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.106:5671: i/o timeout"
Dec 05 19:39:32 crc kubenswrapper[4982]: I1205 19:39:32.587022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d9c1e005-1e95-440f-be18-77dbe6a757db","Type":"ContainerStarted","Data":"df6b121019e13ca862e4a768d7ec2496fae023c54217648089ca5f57ea597530"}
event={"ID":"d9c1e005-1e95-440f-be18-77dbe6a757db","Type":"ContainerStarted","Data":"df6b121019e13ca862e4a768d7ec2496fae023c54217648089ca5f57ea597530"} Dec 05 19:39:32 crc kubenswrapper[4982]: I1205 19:39:32.633494 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="2c4d593c-5baa-4b09-b586-7b0e65acaa73" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.107:5671: i/o timeout" Dec 05 19:39:33 crc kubenswrapper[4982]: I1205 19:39:33.596223 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fdfbdc81-b68b-414e-af43-2f44719ca203","Type":"ContainerStarted","Data":"6d3aaffb1f1b6f76891d9a72889441cbebbee745e9348d45eb9005a463228911"} Dec 05 19:39:34 crc kubenswrapper[4982]: I1205 19:39:34.609122 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e38c99e0-3c00-4474-9a4e-b388a5630685","Type":"ContainerStarted","Data":"06d323f10a7375aba4a06fd985362ad9b42141071bae50421d0a9d8267c15fdf"} Dec 05 19:39:34 crc kubenswrapper[4982]: I1205 19:39:34.611678 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fdfbdc81-b68b-414e-af43-2f44719ca203","Type":"ContainerStarted","Data":"df744cb8e6a52a05c323f1426c144bf1e41ea410f31f3e17feb4a8d02e90746f"} Dec 05 19:39:35 crc kubenswrapper[4982]: I1205 19:39:35.633361 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fdfbdc81-b68b-414e-af43-2f44719ca203","Type":"ContainerStarted","Data":"fdf22051b1c71e6b47088a65773465566a580ffbdf3f5d8b0eea27b2ed40b300"} Dec 05 19:39:36 crc kubenswrapper[4982]: I1205 19:39:36.644053 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"fdfbdc81-b68b-414e-af43-2f44719ca203","Type":"ContainerStarted","Data":"7f40b50631a025c9ac2b47e0fe40220ae1fbae2dac6c085f44e836793ec76030"} Dec 05 19:39:36 crc kubenswrapper[4982]: I1205 19:39:36.644426 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 19:39:36 crc kubenswrapper[4982]: I1205 19:39:36.665750 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=6.443021705 podStartE2EDuration="23.665729445s" podCreationTimestamp="2025-12-05 19:39:13 +0000 UTC" firstStartedPulling="2025-12-05 19:39:18.81279061 +0000 UTC m=+1537.694676605" lastFinishedPulling="2025-12-05 19:39:36.03549835 +0000 UTC m=+1554.917384345" observedRunningTime="2025-12-05 19:39:36.662065172 +0000 UTC m=+1555.543951187" watchObservedRunningTime="2025-12-05 19:39:36.665729445 +0000 UTC m=+1555.547615450" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.544324 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.691977 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"] Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.692217 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="dnsmasq-dns" containerID="cri-o://c2b1af06ccac3262967938e0ae84297794dbdbc59396ad07f2e5edb772001a1e" gracePeriod=10 Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.845727 4982 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-5475ccd585-jn7l9"] Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.847468 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.861538 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5475ccd585-jn7l9"] Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.997726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998101 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998134 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998170 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jstx\" (UniqueName: \"kubernetes.io/projected/a33770ab-0040-4eb9-92c5-7c25cb66fa33-kube-api-access-2jstx\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998263 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-swift-storage-0\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998295 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-svc\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:37 crc kubenswrapper[4982]: I1205 19:39:37.998341 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-config\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.099839 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-swift-storage-0\") pod 
\"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100094 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-svc\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-config\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100354 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100392 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100417 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.100455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jstx\" (UniqueName: \"kubernetes.io/projected/a33770ab-0040-4eb9-92c5-7c25cb66fa33-kube-api-access-2jstx\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.101132 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-swift-storage-0\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.101711 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-dns-svc\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.101843 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: 
\"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.102473 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.102825 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-config\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.103300 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a33770ab-0040-4eb9-92c5-7c25cb66fa33-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.123444 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jstx\" (UniqueName: \"kubernetes.io/projected/a33770ab-0040-4eb9-92c5-7c25cb66fa33-kube-api-access-2jstx\") pod \"dnsmasq-dns-5475ccd585-jn7l9\" (UID: \"a33770ab-0040-4eb9-92c5-7c25cb66fa33\") " pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.203850 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.675300 4982 generic.go:334] "Generic (PLEG): container finished" podID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerID="c2b1af06ccac3262967938e0ae84297794dbdbc59396ad07f2e5edb772001a1e" exitCode=0 Dec 05 19:39:38 crc kubenswrapper[4982]: I1205 19:39:38.675665 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" event={"ID":"a957342e-1213-473c-a9a3-bf1e90bf9bf7","Type":"ContainerDied","Data":"c2b1af06ccac3262967938e0ae84297794dbdbc59396ad07f2e5edb772001a1e"} Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.298894 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432228 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432328 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432392 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432436 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sgg4g\" (UniqueName: \"kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432533 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.432581 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb\") pod \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\" (UID: \"a957342e-1213-473c-a9a3-bf1e90bf9bf7\") " Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.468452 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g" (OuterVolumeSpecName: "kube-api-access-sgg4g") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "kube-api-access-sgg4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.538570 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sgg4g\" (UniqueName: \"kubernetes.io/projected/a957342e-1213-473c-a9a3-bf1e90bf9bf7-kube-api-access-sgg4g\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.579840 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.589699 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.601606 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.612666 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.630764 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config" (OuterVolumeSpecName: "config") pod "a957342e-1213-473c-a9a3-bf1e90bf9bf7" (UID: "a957342e-1213-473c-a9a3-bf1e90bf9bf7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.640622 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.640650 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.640659 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.640668 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.640677 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a957342e-1213-473c-a9a3-bf1e90bf9bf7-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.686415 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" event={"ID":"a957342e-1213-473c-a9a3-bf1e90bf9bf7","Type":"ContainerDied","Data":"14fde105179c9afa1e3389c9c4666b82c7e9ebb51f244cee7ae0b1cfefb003ca"} Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.686464 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.686473 4982 scope.go:117] "RemoveContainer" containerID="c2b1af06ccac3262967938e0ae84297794dbdbc59396ad07f2e5edb772001a1e" Dec 05 19:39:39 crc kubenswrapper[4982]: I1205 19:39:39.713278 4982 scope.go:117] "RemoveContainer" containerID="565492ea7d9569f76f04f7fa23be473e07479dd5f85f727dfcbb4b85ce042ed4" Dec 05 19:39:40 crc kubenswrapper[4982]: I1205 19:39:40.059512 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"] Dec 05 19:39:40 crc kubenswrapper[4982]: I1205 19:39:40.094678 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-ntj5m"] Dec 05 19:39:40 crc kubenswrapper[4982]: I1205 19:39:40.538609 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5475ccd585-jn7l9"] Dec 05 19:39:40 crc kubenswrapper[4982]: I1205 19:39:40.696004 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" event={"ID":"a33770ab-0040-4eb9-92c5-7c25cb66fa33","Type":"ContainerStarted","Data":"0cfb73c4dfef8aa5f0065f7ebb382bc9aaa3ae6a5df06299aa41bdcefadca7d1"} Dec 05 19:39:41 crc kubenswrapper[4982]: I1205 19:39:41.402934 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" path="/var/lib/kubelet/pods/a957342e-1213-473c-a9a3-bf1e90bf9bf7/volumes" Dec 05 19:39:41 crc kubenswrapper[4982]: I1205 19:39:41.708668 4982 generic.go:334] "Generic (PLEG): container finished" podID="a33770ab-0040-4eb9-92c5-7c25cb66fa33" containerID="bb1b53b9c7bfff33436651034df4bf0d9e7f816299600eaad0c3d6cfc351ca21" exitCode=0 Dec 05 19:39:41 crc kubenswrapper[4982]: I1205 19:39:41.708735 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" event={"ID":"a33770ab-0040-4eb9-92c5-7c25cb66fa33","Type":"ContainerDied","Data":"bb1b53b9c7bfff33436651034df4bf0d9e7f816299600eaad0c3d6cfc351ca21"} Dec 05 19:39:42 crc kubenswrapper[4982]: I1205 19:39:42.557805 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:39:42 crc kubenswrapper[4982]: I1205 19:39:42.558625 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:39:42 crc kubenswrapper[4982]: I1205 19:39:42.723522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" event={"ID":"a33770ab-0040-4eb9-92c5-7c25cb66fa33","Type":"ContainerStarted","Data":"9c9abe95a54246be13d8827a1d07c3cd67164fdc5cda72ce9eb3daea892a3dc1"} Dec 05 19:39:42 crc kubenswrapper[4982]: I1205 19:39:42.723746 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:42 crc kubenswrapper[4982]: I1205 19:39:42.746461 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" podStartSLOduration=5.746441292 podStartE2EDuration="5.746441292s" 
podCreationTimestamp="2025-12-05 19:39:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:39:42.741617611 +0000 UTC m=+1561.623503616" watchObservedRunningTime="2025-12-05 19:39:42.746441292 +0000 UTC m=+1561.628327287" Dec 05 19:39:43 crc kubenswrapper[4982]: I1205 19:39:43.589560 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 19:39:44 crc kubenswrapper[4982]: I1205 19:39:44.293964 4982 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-78468d7767-ntj5m" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.224:5353: i/o timeout" Dec 05 19:39:44 crc kubenswrapper[4982]: I1205 19:39:44.745559 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-np8hc" event={"ID":"ac18c894-f60b-4db2-80a2-82f23f52f9a2","Type":"ContainerStarted","Data":"2ecf85a70d2d177d0dabfdc3dbfb1a2f883dd54d011427190c021ee75087c917"} Dec 05 19:39:44 crc kubenswrapper[4982]: I1205 19:39:44.767702 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-np8hc" podStartSLOduration=2.237912524 podStartE2EDuration="38.767682212s" podCreationTimestamp="2025-12-05 19:39:06 +0000 UTC" firstStartedPulling="2025-12-05 19:39:07.057036898 +0000 UTC m=+1525.938922893" lastFinishedPulling="2025-12-05 19:39:43.586806576 +0000 UTC m=+1562.468692581" observedRunningTime="2025-12-05 19:39:44.764709207 +0000 UTC m=+1563.646595202" watchObservedRunningTime="2025-12-05 19:39:44.767682212 +0000 UTC m=+1563.649568227" Dec 05 19:39:46 crc kubenswrapper[4982]: I1205 19:39:46.766665 4982 generic.go:334] "Generic (PLEG): container finished" podID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" containerID="2ecf85a70d2d177d0dabfdc3dbfb1a2f883dd54d011427190c021ee75087c917" exitCode=0 Dec 05 19:39:46 crc kubenswrapper[4982]: I1205 19:39:46.766865 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-np8hc" event={"ID":"ac18c894-f60b-4db2-80a2-82f23f52f9a2","Type":"ContainerDied","Data":"2ecf85a70d2d177d0dabfdc3dbfb1a2f883dd54d011427190c021ee75087c917"} Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.206250 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5475ccd585-jn7l9" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.278163 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.278440 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-595979776c-p5xw2" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="dnsmasq-dns" containerID="cri-o://2cdd693e3c1595bda06629d932c1bc41fbff71843df46087c009be7999a3a6d4" gracePeriod=10 Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.370632 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.445636 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts\") pod \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.445728 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c446r\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r\") pod \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.445826 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle\") pod \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.445888 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data\") pod \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.446079 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs\") pod \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\" (UID: \"ac18c894-f60b-4db2-80a2-82f23f52f9a2\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.454189 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs" (OuterVolumeSpecName: "certs") pod "ac18c894-f60b-4db2-80a2-82f23f52f9a2" (UID: "ac18c894-f60b-4db2-80a2-82f23f52f9a2"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.472508 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r" (OuterVolumeSpecName: "kube-api-access-c446r") pod "ac18c894-f60b-4db2-80a2-82f23f52f9a2" (UID: "ac18c894-f60b-4db2-80a2-82f23f52f9a2"). InnerVolumeSpecName "kube-api-access-c446r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.474354 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts" (OuterVolumeSpecName: "scripts") pod "ac18c894-f60b-4db2-80a2-82f23f52f9a2" (UID: "ac18c894-f60b-4db2-80a2-82f23f52f9a2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.482736 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac18c894-f60b-4db2-80a2-82f23f52f9a2" (UID: "ac18c894-f60b-4db2-80a2-82f23f52f9a2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.489928 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data" (OuterVolumeSpecName: "config-data") pod "ac18c894-f60b-4db2-80a2-82f23f52f9a2" (UID: "ac18c894-f60b-4db2-80a2-82f23f52f9a2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.549429 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.549867 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.549881 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.549893 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ac18c894-f60b-4db2-80a2-82f23f52f9a2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.549907 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c446r\" (UniqueName: \"kubernetes.io/projected/ac18c894-f60b-4db2-80a2-82f23f52f9a2-kube-api-access-c446r\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.791353 4982 generic.go:334] "Generic (PLEG): container finished" podID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerID="2cdd693e3c1595bda06629d932c1bc41fbff71843df46087c009be7999a3a6d4" exitCode=0 Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.791461 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-p5xw2" event={"ID":"132d8d85-1260-4e88-b8f2-1f7dfe748a08","Type":"ContainerDied","Data":"2cdd693e3c1595bda06629d932c1bc41fbff71843df46087c009be7999a3a6d4"} Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.797656 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-np8hc" event={"ID":"ac18c894-f60b-4db2-80a2-82f23f52f9a2","Type":"ContainerDied","Data":"2a7287883ddb3d70c850f7988bdd66d98bbb785b86b35c6de1c58667e800a65a"} Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.797705 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a7287883ddb3d70c850f7988bdd66d98bbb785b86b35c6de1c58667e800a65a" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.797832 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-np8hc" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.827966 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.944229 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-pmmkj"] Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955481 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955627 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8kdt\" (UniqueName: \"kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955668 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955765 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955811 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.955853 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb\") pod \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\" (UID: \"132d8d85-1260-4e88-b8f2-1f7dfe748a08\") " Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.959957 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-pmmkj"] Dec 05 19:39:48 crc kubenswrapper[4982]: I1205 19:39:48.969421 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt" (OuterVolumeSpecName: "kube-api-access-b8kdt") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "kube-api-access-b8kdt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.010563 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.012746 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.017554 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.020939 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.029664 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config" (OuterVolumeSpecName: "config") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.032281 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "132d8d85-1260-4e88-b8f2-1f7dfe748a08" (UID: "132d8d85-1260-4e88-b8f2-1f7dfe748a08"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.038936 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-lvfbw"] Dec 05 19:39:49 crc kubenswrapper[4982]: E1205 19:39:49.039410 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="init" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039427 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="init" Dec 05 19:39:49 crc kubenswrapper[4982]: E1205 19:39:49.039453 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" containerName="cloudkitty-db-sync" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039460 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" containerName="cloudkitty-db-sync" Dec 05 19:39:49 crc kubenswrapper[4982]: E1205 19:39:49.039478 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="init" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039483 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="init" Dec 05 19:39:49 crc kubenswrapper[4982]: E1205 19:39:49.039492 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039499 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: E1205 19:39:49.039520 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039526 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039749 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="a957342e-1213-473c-a9a3-bf1e90bf9bf7" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039766 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" containerName="cloudkitty-db-sync" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.039786 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" containerName="dnsmasq-dns" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.040637 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.042495 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.049763 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-lvfbw"] Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058269 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058318 4982 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058328 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8kdt\" (UniqueName: \"kubernetes.io/projected/132d8d85-1260-4e88-b8f2-1f7dfe748a08-kube-api-access-b8kdt\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058338 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058348 4982 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058357 4982 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-config\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.058364 4982 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/132d8d85-1260-4e88-b8f2-1f7dfe748a08-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.161056 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.161211 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.161361 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.161584 4982 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlj27\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.161647 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.264216 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.264283 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.264347 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.264452 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlj27\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.264526 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.270724 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.271193 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.283862 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" 
(UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.286954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.292985 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlj27\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27\") pod \"cloudkitty-storageinit-lvfbw\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.355662 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.404568 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9f9181c-b863-4744-bc40-5fd51918c5bd" path="/var/lib/kubelet/pods/a9f9181c-b863-4744-bc40-5fd51918c5bd/volumes" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.810354 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-p5xw2" event={"ID":"132d8d85-1260-4e88-b8f2-1f7dfe748a08","Type":"ContainerDied","Data":"60694694809f38c0d72fdabbb1abad2241bbd6baabe7bf65db125ad656f06791"} Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.810436 4982 scope.go:117] "RemoveContainer" containerID="2cdd693e3c1595bda06629d932c1bc41fbff71843df46087c009be7999a3a6d4" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.810475 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-p5xw2" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.837938 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.844744 4982 scope.go:117] "RemoveContainer" containerID="02ebd60ae616a6b94a4c54273c6b6a4f0ac434a6fe75ebba7ef5e8672597f408" Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.859275 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-595979776c-p5xw2"] Dec 05 19:39:49 crc kubenswrapper[4982]: I1205 19:39:49.876675 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-lvfbw"] Dec 05 19:39:49 crc kubenswrapper[4982]: W1205 19:39:49.891580 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78479310_9820_4899_98ac_243473c53a62.slice/crio-8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3 WatchSource:0}: Error finding container 8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3: Status 404 returned error can't find the container with id 8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3 Dec 05 19:39:50 crc kubenswrapper[4982]: I1205 19:39:50.822940 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-lvfbw" event={"ID":"78479310-9820-4899-98ac-243473c53a62","Type":"ContainerStarted","Data":"b74558ece5d857994012c3cafb5f6f4067adab752c1c94f8817977d6bd7a6fb7"} Dec 05 19:39:50 crc kubenswrapper[4982]: I1205 19:39:50.823279 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-lvfbw" event={"ID":"78479310-9820-4899-98ac-243473c53a62","Type":"ContainerStarted","Data":"8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3"} Dec 05 19:39:50 crc kubenswrapper[4982]: I1205 19:39:50.849266 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-lvfbw" podStartSLOduration=1.849244661 podStartE2EDuration="1.849244661s" podCreationTimestamp="2025-12-05 19:39:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:39:50.841126917 +0000 UTC m=+1569.723012942" watchObservedRunningTime="2025-12-05 19:39:50.849244661 +0000 UTC m=+1569.731130656" Dec 05 19:39:51 crc kubenswrapper[4982]: I1205 19:39:51.413837 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="132d8d85-1260-4e88-b8f2-1f7dfe748a08" path="/var/lib/kubelet/pods/132d8d85-1260-4e88-b8f2-1f7dfe748a08/volumes" Dec 05 19:39:51 crc kubenswrapper[4982]: I1205 19:39:51.835603 4982 generic.go:334] "Generic (PLEG): container finished" podID="78479310-9820-4899-98ac-243473c53a62" containerID="b74558ece5d857994012c3cafb5f6f4067adab752c1c94f8817977d6bd7a6fb7" exitCode=0 Dec 05 19:39:51 crc kubenswrapper[4982]: I1205 19:39:51.835676 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-lvfbw" event={"ID":"78479310-9820-4899-98ac-243473c53a62","Type":"ContainerDied","Data":"b74558ece5d857994012c3cafb5f6f4067adab752c1c94f8817977d6bd7a6fb7"} Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.440000 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.549196 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs\") pod \"78479310-9820-4899-98ac-243473c53a62\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.549671 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlj27\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27\") pod \"78479310-9820-4899-98ac-243473c53a62\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.549785 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle\") pod \"78479310-9820-4899-98ac-243473c53a62\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.549896 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts\") pod \"78479310-9820-4899-98ac-243473c53a62\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.549949 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data\") pod \"78479310-9820-4899-98ac-243473c53a62\" (UID: \"78479310-9820-4899-98ac-243473c53a62\") " Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.555972 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs" (OuterVolumeSpecName: "certs") pod "78479310-9820-4899-98ac-243473c53a62" (UID: "78479310-9820-4899-98ac-243473c53a62"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.557872 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts" (OuterVolumeSpecName: "scripts") pod "78479310-9820-4899-98ac-243473c53a62" (UID: "78479310-9820-4899-98ac-243473c53a62"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.560348 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27" (OuterVolumeSpecName: "kube-api-access-rlj27") pod "78479310-9820-4899-98ac-243473c53a62" (UID: "78479310-9820-4899-98ac-243473c53a62"). InnerVolumeSpecName "kube-api-access-rlj27". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.581912 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data" (OuterVolumeSpecName: "config-data") pod "78479310-9820-4899-98ac-243473c53a62" (UID: "78479310-9820-4899-98ac-243473c53a62"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.584287 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78479310-9820-4899-98ac-243473c53a62" (UID: "78479310-9820-4899-98ac-243473c53a62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.652629 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlj27\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-kube-api-access-rlj27\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.652666 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.652676 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.652684 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78479310-9820-4899-98ac-243473c53a62-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.652692 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/78479310-9820-4899-98ac-243473c53a62-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.856986 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-lvfbw" event={"ID":"78479310-9820-4899-98ac-243473c53a62","Type":"ContainerDied","Data":"8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3"} Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.857283 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8ee26019f5b8a799ceee2d308f0c1a7eafde7ab3c905c8c73acc89d3cce24ab3" Dec 05 19:39:53 crc kubenswrapper[4982]: I1205 19:39:53.857043 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-lvfbw" Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.015316 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.015552 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="c5a3369c-f919-4454-8675-60b641794708" containerName="cloudkitty-proc" containerID="cri-o://0f764eeed87c84b9be9aaf8bd10ccd088023ac9a79b833a4fe1ce9c4cf1eb54b" gracePeriod=30 Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.033520 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.033802 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api-log" containerID="cri-o://2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf" gracePeriod=30 Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.033888 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api" containerID="cri-o://3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6" gracePeriod=30 Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.872701 4982 generic.go:334] "Generic (PLEG): container finished" podID="c5a3369c-f919-4454-8675-60b641794708" containerID="0f764eeed87c84b9be9aaf8bd10ccd088023ac9a79b833a4fe1ce9c4cf1eb54b" exitCode=0 Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.872759 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"c5a3369c-f919-4454-8675-60b641794708","Type":"ContainerDied","Data":"0f764eeed87c84b9be9aaf8bd10ccd088023ac9a79b833a4fe1ce9c4cf1eb54b"} Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.875547 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerID="2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf" exitCode=143 Dec 05 19:39:54 crc kubenswrapper[4982]: I1205 19:39:54.875581 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerDied","Data":"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf"} Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.204401 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.288878 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.288943 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.289018 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfxc9\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.289047 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.289097 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.289139 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs\") pod \"c5a3369c-f919-4454-8675-60b641794708\" (UID: \"c5a3369c-f919-4454-8675-60b641794708\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.294536 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts" (OuterVolumeSpecName: "scripts") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.294734 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs" (OuterVolumeSpecName: "certs") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.294910 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9" (OuterVolumeSpecName: "kube-api-access-tfxc9") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "kube-api-access-tfxc9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.308017 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.319262 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.323193 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data" (OuterVolumeSpecName: "config-data") pod "c5a3369c-f919-4454-8675-60b641794708" (UID: "c5a3369c-f919-4454-8675-60b641794708"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.361948 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395009 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ch48x\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395121 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395176 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395244 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395334 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395399 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395473 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395523 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.395554 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom\") pod \"2d605444-dcdf-4df1-ac2a-3b821d07d390\" (UID: \"2d605444-dcdf-4df1-ac2a-3b821d07d390\") " Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.397624 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs" (OuterVolumeSpecName: "logs") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.403601 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407069 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts" (OuterVolumeSpecName: "scripts") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407417 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x" (OuterVolumeSpecName: "kube-api-access-ch48x") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "kube-api-access-ch48x". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407656 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407688 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407704 4982 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2d605444-dcdf-4df1-ac2a-3b821d07d390-logs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407716 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfxc9\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-kube-api-access-tfxc9\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407728 4982 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407739 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407750 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407762 4982 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c5a3369c-f919-4454-8675-60b641794708-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.407773 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/c5a3369c-f919-4454-8675-60b641794708-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.415909 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs" (OuterVolumeSpecName: "certs") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.446954 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.459858 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data" (OuterVolumeSpecName: "config-data") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.483959 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.485079 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "2d605444-dcdf-4df1-ac2a-3b821d07d390" (UID: "2d605444-dcdf-4df1-ac2a-3b821d07d390"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509290 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ch48x\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-kube-api-access-ch48x\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509339 4982 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509363 4982 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/2d605444-dcdf-4df1-ac2a-3b821d07d390-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509374 4982 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509382 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.509390 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2d605444-dcdf-4df1-ac2a-3b821d07d390-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.890694 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"c5a3369c-f919-4454-8675-60b641794708","Type":"ContainerDied","Data":"93d1f6fdf4673538fdb395d2304d4886ba698fdb09c65232fc63926d7cf330d5"} Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.890756 4982 scope.go:117] "RemoveContainer" containerID="0f764eeed87c84b9be9aaf8bd10ccd088023ac9a79b833a4fe1ce9c4cf1eb54b" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.890819 4982 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.894817 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerID="3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6" exitCode=0 Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.894878 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerDied","Data":"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6"} Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.894917 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"2d605444-dcdf-4df1-ac2a-3b821d07d390","Type":"ContainerDied","Data":"12eac0c79b931dead3e375d7b153b536b1bfad76d008632a651ef36f9c9aab54"} Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.894991 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.940608 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.952311 4982 scope.go:117] "RemoveContainer" containerID="3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.966982 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.987395 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:55 crc kubenswrapper[4982]: E1205 19:39:55.987954 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api-log" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.987972 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api-log" Dec 05 19:39:55 crc kubenswrapper[4982]: E1205 19:39:55.987992 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c5a3369c-f919-4454-8675-60b641794708" containerName="cloudkitty-proc" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988003 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c5a3369c-f919-4454-8675-60b641794708" containerName="cloudkitty-proc" Dec 05 19:39:55 crc kubenswrapper[4982]: E1205 19:39:55.988040 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78479310-9820-4899-98ac-243473c53a62" containerName="cloudkitty-storageinit" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988049 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="78479310-9820-4899-98ac-243473c53a62" containerName="cloudkitty-storageinit" Dec 05 19:39:55 crc kubenswrapper[4982]: E1205 19:39:55.988074 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988083 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988341 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" 
containerName="cloudkitty-api" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988362 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c5a3369c-f919-4454-8675-60b641794708" containerName="cloudkitty-proc" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988386 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" containerName="cloudkitty-api-log" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.988401 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="78479310-9820-4899-98ac-243473c53a62" containerName="cloudkitty-storageinit" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.989344 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.994196 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-xxdrk" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.994648 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.994760 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.994839 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.995177 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 19:39:55 crc kubenswrapper[4982]: I1205 19:39:55.995507 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.007466 4982 scope.go:117] "RemoveContainer" containerID="2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.011961 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.024240 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.040449 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.042259 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.044940 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.046163 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.046392 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.056062 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.059030 4982 scope.go:117] "RemoveContainer" containerID="3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6" Dec 05 19:39:56 crc kubenswrapper[4982]: E1205 19:39:56.059671 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6\": container with ID starting with 3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6 not found: ID does not exist" containerID="3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.059711 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6"} err="failed to get container status \"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6\": rpc error: code = NotFound desc = could not find container \"3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6\": container with ID starting with 3471723b15bd53673f82b351563b62f45fce5bb31b2ac26085ac8bfeb05020d6 not found: ID does not exist" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.059736 4982 scope.go:117] "RemoveContainer" containerID="2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf" Dec 05 19:39:56 crc kubenswrapper[4982]: E1205 19:39:56.060007 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf\": container with ID starting with 2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf not found: ID does not exist" containerID="2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.060026 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf"} err="failed to get container status \"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf\": rpc error: code = NotFound desc = could not find container \"2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf\": container with ID starting with 2e5e2f0d2216e980cc4d20a060f227ee83cd60698e28cc3ca8325b7579a1eacf not found: ID does not exist" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126097 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") 
" pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126177 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126210 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126227 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126299 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126323 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126345 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126371 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-scripts\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126396 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dwqn\" (UniqueName: \"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-kube-api-access-6dwqn\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126414 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126437 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-certs\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126463 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126487 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126506 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cz5tq\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-kube-api-access-cz5tq\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.126525 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a30af9ef-11ee-4919-8ca8-2ba7d588264b-logs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228362 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228433 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cz5tq\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-kube-api-access-cz5tq\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228451 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a30af9ef-11ee-4919-8ca8-2ba7d588264b-logs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228520 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" 
(UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228553 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228585 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228599 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228656 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228674 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228689 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-scripts\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228732 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dwqn\" (UniqueName: \"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-kube-api-access-6dwqn\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228748 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.228774 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: 
\"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-certs\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.230236 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a30af9ef-11ee-4919-8ca8-2ba7d588264b-logs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.236916 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-certs\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.237434 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.240641 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-scripts\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.242571 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.242662 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.243043 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.243520 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.246478 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-scripts\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.246691 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.247732 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.249915 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.250783 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a30af9ef-11ee-4919-8ca8-2ba7d588264b-config-data\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.251570 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dwqn\" (UniqueName: \"kubernetes.io/projected/541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d-kube-api-access-6dwqn\") pod \"cloudkitty-proc-0\" (UID: \"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d\") " pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.273964 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cz5tq\" (UniqueName: \"kubernetes.io/projected/a30af9ef-11ee-4919-8ca8-2ba7d588264b-kube-api-access-cz5tq\") pod \"cloudkitty-api-0\" (UID: \"a30af9ef-11ee-4919-8ca8-2ba7d588264b\") " pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.317506 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.362489 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 19:39:56 crc kubenswrapper[4982]: W1205 19:39:56.836782 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod541f0bcc_c2ab_4e67_bc9a_b45a2dc5747d.slice/crio-a8dd970e5ea62544eacbdaaffa01eed1fe9872584f27fb70568d20c852669462 WatchSource:0}: Error finding container a8dd970e5ea62544eacbdaaffa01eed1fe9872584f27fb70568d20c852669462: Status 404 returned error can't find the container with id a8dd970e5ea62544eacbdaaffa01eed1fe9872584f27fb70568d20c852669462 Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.837254 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.906120 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d","Type":"ContainerStarted","Data":"a8dd970e5ea62544eacbdaaffa01eed1fe9872584f27fb70568d20c852669462"} Dec 05 19:39:56 crc kubenswrapper[4982]: W1205 19:39:56.919385 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda30af9ef_11ee_4919_8ca8_2ba7d588264b.slice/crio-aec844b8abe210b2e1fcb96e54c8502feca2dd1106f94aab420d4aa1712219f8 WatchSource:0}: Error finding container aec844b8abe210b2e1fcb96e54c8502feca2dd1106f94aab420d4aa1712219f8: Status 404 returned error can't find the container with id aec844b8abe210b2e1fcb96e54c8502feca2dd1106f94aab420d4aa1712219f8 Dec 05 19:39:56 crc kubenswrapper[4982]: I1205 19:39:56.924515 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.405458 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d605444-dcdf-4df1-ac2a-3b821d07d390" path="/var/lib/kubelet/pods/2d605444-dcdf-4df1-ac2a-3b821d07d390/volumes" Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.406645 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c5a3369c-f919-4454-8675-60b641794708" path="/var/lib/kubelet/pods/c5a3369c-f919-4454-8675-60b641794708/volumes" Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.955344 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"a30af9ef-11ee-4919-8ca8-2ba7d588264b","Type":"ContainerStarted","Data":"b729c0eb7e2695a54a341b204519ae2e3f5f31588a0f0abc69924faf19041e63"} Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.955690 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"a30af9ef-11ee-4919-8ca8-2ba7d588264b","Type":"ContainerStarted","Data":"fb66940b96e6c605966c69cdd4e8c50530fd14b56b0c3a88e0808ad5c623739b"} Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.955704 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"a30af9ef-11ee-4919-8ca8-2ba7d588264b","Type":"ContainerStarted","Data":"aec844b8abe210b2e1fcb96e54c8502feca2dd1106f94aab420d4aa1712219f8"} Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.956770 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 05 19:39:57 crc kubenswrapper[4982]: I1205 19:39:57.987397 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.987376986 
podStartE2EDuration="2.987376986s" podCreationTimestamp="2025-12-05 19:39:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:39:57.973880286 +0000 UTC m=+1576.855766301" watchObservedRunningTime="2025-12-05 19:39:57.987376986 +0000 UTC m=+1576.869262981" Dec 05 19:39:58 crc kubenswrapper[4982]: I1205 19:39:58.965701 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d","Type":"ContainerStarted","Data":"72d5fda0ed49745bc1d325e0e2d39d3508eccbca74cfc7ddf914e0053568af24"} Dec 05 19:39:58 crc kubenswrapper[4982]: I1205 19:39:58.987350 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=3.05381858 podStartE2EDuration="3.987331148s" podCreationTimestamp="2025-12-05 19:39:55 +0000 UTC" firstStartedPulling="2025-12-05 19:39:56.839707877 +0000 UTC m=+1575.721593872" lastFinishedPulling="2025-12-05 19:39:57.773220445 +0000 UTC m=+1576.655106440" observedRunningTime="2025-12-05 19:39:58.979571753 +0000 UTC m=+1577.861457778" watchObservedRunningTime="2025-12-05 19:39:58.987331148 +0000 UTC m=+1577.869217143" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.028468 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.030507 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.051699 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.102359 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7b99s\" (UniqueName: \"kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.102421 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.102459 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.204049 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7b99s\" (UniqueName: \"kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.204114 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.204168 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.204693 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.205181 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.228824 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7b99s\" (UniqueName: \"kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s\") pod \"community-operators-wz6vt\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.348382 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:00 crc kubenswrapper[4982]: I1205 19:40:00.958586 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.006390 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerStarted","Data":"f4619fd50de04b2b23ddc3156e93d7d4664fccd13dae17737e497d9286548b92"} Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.163206 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg"] Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.164933 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.177641 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.177674 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.177928 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.178163 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.178209 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg"] Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.248189 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx5p4\" (UniqueName: \"kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.248247 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.248341 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.248509 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.350676 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.350782 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx5p4\" (UniqueName: 
\"kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.350837 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.351660 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.358439 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.372177 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.372686 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.375906 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx5p4\" (UniqueName: \"kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:01 crc kubenswrapper[4982]: I1205 19:40:01.499369 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:02 crc kubenswrapper[4982]: I1205 19:40:02.026474 4982 generic.go:334] "Generic (PLEG): container finished" podID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerID="2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1" exitCode=0 Dec 05 19:40:02 crc kubenswrapper[4982]: I1205 19:40:02.026705 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerDied","Data":"2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1"} Dec 05 19:40:02 crc kubenswrapper[4982]: I1205 19:40:02.093793 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg"] Dec 05 19:40:02 crc kubenswrapper[4982]: W1205 19:40:02.135855 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1458b955_6299_4a91_a904_4146c620e208.slice/crio-67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557 WatchSource:0}: Error finding container 67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557: Status 404 returned error can't find the container with id 67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557 Dec 05 19:40:03 crc kubenswrapper[4982]: I1205 19:40:03.045866 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerStarted","Data":"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605"} Dec 05 19:40:03 crc kubenswrapper[4982]: I1205 19:40:03.051335 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" event={"ID":"1458b955-6299-4a91-a904-4146c620e208","Type":"ContainerStarted","Data":"67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557"} Dec 05 19:40:04 crc kubenswrapper[4982]: I1205 19:40:04.089333 4982 generic.go:334] "Generic (PLEG): container finished" podID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerID="0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605" exitCode=0 Dec 05 19:40:04 crc kubenswrapper[4982]: I1205 19:40:04.089666 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerDied","Data":"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605"} Dec 05 19:40:07 crc kubenswrapper[4982]: I1205 19:40:07.124192 4982 generic.go:334] "Generic (PLEG): container finished" podID="d9c1e005-1e95-440f-be18-77dbe6a757db" containerID="df6b121019e13ca862e4a768d7ec2496fae023c54217648089ca5f57ea597530" exitCode=0 Dec 05 19:40:07 crc kubenswrapper[4982]: I1205 19:40:07.124315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d9c1e005-1e95-440f-be18-77dbe6a757db","Type":"ContainerDied","Data":"df6b121019e13ca862e4a768d7ec2496fae023c54217648089ca5f57ea597530"} Dec 05 19:40:08 crc kubenswrapper[4982]: I1205 19:40:08.136814 4982 generic.go:334] "Generic (PLEG): container finished" podID="e38c99e0-3c00-4474-9a4e-b388a5630685" containerID="06d323f10a7375aba4a06fd985362ad9b42141071bae50421d0a9d8267c15fdf" exitCode=0 Dec 05 19:40:08 crc kubenswrapper[4982]: I1205 19:40:08.136904 4982 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e38c99e0-3c00-4474-9a4e-b388a5630685","Type":"ContainerDied","Data":"06d323f10a7375aba4a06fd985362ad9b42141071bae50421d0a9d8267c15fdf"} Dec 05 19:40:12 crc kubenswrapper[4982]: I1205 19:40:12.556800 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:40:12 crc kubenswrapper[4982]: I1205 19:40:12.557494 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:40:13 crc kubenswrapper[4982]: I1205 19:40:13.817212 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.204462 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerStarted","Data":"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127"} Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.207405 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"d9c1e005-1e95-440f-be18-77dbe6a757db","Type":"ContainerStarted","Data":"f3143e83b586d0677ca00709461bf0260bae8bda24253cfba0ffea8726adb64a"} Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.208119 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.210682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"e38c99e0-3c00-4474-9a4e-b388a5630685","Type":"ContainerStarted","Data":"c187277f3a0c26eb2f971da76d7089eca8158fe28801dca6888b6700c2b37b34"} Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.210933 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.230972 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-wz6vt" podStartSLOduration=3.536049798 podStartE2EDuration="15.230949979s" podCreationTimestamp="2025-12-05 19:39:59 +0000 UTC" firstStartedPulling="2025-12-05 19:40:02.03001097 +0000 UTC m=+1580.911896965" lastFinishedPulling="2025-12-05 19:40:13.724911151 +0000 UTC m=+1592.606797146" observedRunningTime="2025-12-05 19:40:14.228023645 +0000 UTC m=+1593.109909650" watchObservedRunningTime="2025-12-05 19:40:14.230949979 +0000 UTC m=+1593.112835964" Dec 05 19:40:14 crc kubenswrapper[4982]: I1205 19:40:14.270077 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.270055733 podStartE2EDuration="45.270055733s" podCreationTimestamp="2025-12-05 19:39:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:40:14.259223751 +0000 UTC m=+1593.141109756" watchObservedRunningTime="2025-12-05 
19:40:14.270055733 +0000 UTC m=+1593.151941728" Dec 05 19:40:15 crc kubenswrapper[4982]: I1205 19:40:15.222680 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" event={"ID":"1458b955-6299-4a91-a904-4146c620e208","Type":"ContainerStarted","Data":"0d06824a6aae27c51b69afca35b5d914c6077acd72fd394cdb5b629d6ac9b22e"} Dec 05 19:40:15 crc kubenswrapper[4982]: I1205 19:40:15.260735 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=46.26071885 podStartE2EDuration="46.26071885s" podCreationTimestamp="2025-12-05 19:39:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 19:40:14.303496305 +0000 UTC m=+1593.185382300" watchObservedRunningTime="2025-12-05 19:40:15.26071885 +0000 UTC m=+1594.142604845" Dec 05 19:40:15 crc kubenswrapper[4982]: I1205 19:40:15.265631 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" podStartSLOduration=2.575051071 podStartE2EDuration="14.265616463s" podCreationTimestamp="2025-12-05 19:40:01 +0000 UTC" firstStartedPulling="2025-12-05 19:40:02.13847791 +0000 UTC m=+1581.020363905" lastFinishedPulling="2025-12-05 19:40:13.829043302 +0000 UTC m=+1592.710929297" observedRunningTime="2025-12-05 19:40:15.260283479 +0000 UTC m=+1594.142169474" watchObservedRunningTime="2025-12-05 19:40:15.265616463 +0000 UTC m=+1594.147502458" Dec 05 19:40:20 crc kubenswrapper[4982]: I1205 19:40:20.349095 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:20 crc kubenswrapper[4982]: I1205 19:40:20.349845 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:21 crc kubenswrapper[4982]: I1205 19:40:21.416283 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-wz6vt" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="registry-server" probeResult="failure" output=< Dec 05 19:40:21 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:40:21 crc kubenswrapper[4982]: > Dec 05 19:40:26 crc kubenswrapper[4982]: I1205 19:40:26.351500 4982 generic.go:334] "Generic (PLEG): container finished" podID="1458b955-6299-4a91-a904-4146c620e208" containerID="0d06824a6aae27c51b69afca35b5d914c6077acd72fd394cdb5b629d6ac9b22e" exitCode=0 Dec 05 19:40:26 crc kubenswrapper[4982]: I1205 19:40:26.351573 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" event={"ID":"1458b955-6299-4a91-a904-4146c620e208","Type":"ContainerDied","Data":"0d06824a6aae27c51b69afca35b5d914c6077acd72fd394cdb5b629d6ac9b22e"} Dec 05 19:40:27 crc kubenswrapper[4982]: I1205 19:40:27.880599 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.003257 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle\") pod \"1458b955-6299-4a91-a904-4146c620e208\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.003323 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory\") pod \"1458b955-6299-4a91-a904-4146c620e208\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.003363 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx5p4\" (UniqueName: \"kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4\") pod \"1458b955-6299-4a91-a904-4146c620e208\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.003467 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key\") pod \"1458b955-6299-4a91-a904-4146c620e208\" (UID: \"1458b955-6299-4a91-a904-4146c620e208\") " Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.009422 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1458b955-6299-4a91-a904-4146c620e208" (UID: "1458b955-6299-4a91-a904-4146c620e208"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.011138 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4" (OuterVolumeSpecName: "kube-api-access-jx5p4") pod "1458b955-6299-4a91-a904-4146c620e208" (UID: "1458b955-6299-4a91-a904-4146c620e208"). InnerVolumeSpecName "kube-api-access-jx5p4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.038575 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory" (OuterVolumeSpecName: "inventory") pod "1458b955-6299-4a91-a904-4146c620e208" (UID: "1458b955-6299-4a91-a904-4146c620e208"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.048432 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1458b955-6299-4a91-a904-4146c620e208" (UID: "1458b955-6299-4a91-a904-4146c620e208"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.108652 4982 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.108708 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.108725 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx5p4\" (UniqueName: \"kubernetes.io/projected/1458b955-6299-4a91-a904-4146c620e208-kube-api-access-jx5p4\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.108738 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1458b955-6299-4a91-a904-4146c620e208-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.384715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" event={"ID":"1458b955-6299-4a91-a904-4146c620e208","Type":"ContainerDied","Data":"67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557"} Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.384774 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.384783 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67ad5148aa63168bd98c204e7cb7e0839557e3405ecf36cb4ce938df6bb54557" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.477067 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp"] Dec 05 19:40:28 crc kubenswrapper[4982]: E1205 19:40:28.477701 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1458b955-6299-4a91-a904-4146c620e208" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.477731 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1458b955-6299-4a91-a904-4146c620e208" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.477999 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1458b955-6299-4a91-a904-4146c620e208" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.479066 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.483734 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.483948 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.483976 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.489677 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.489764 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp"] Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.623763 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.623900 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpmqj\" (UniqueName: \"kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.624638 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.726128 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpmqj\" (UniqueName: \"kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.726384 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.726492 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.733645 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.735713 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.745328 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpmqj\" (UniqueName: \"kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-lk6dp\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:28 crc kubenswrapper[4982]: I1205 19:40:28.856709 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:29 crc kubenswrapper[4982]: I1205 19:40:29.455399 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp"] Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.132346 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.207358 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.451198 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" event={"ID":"5eedd2f2-bb50-4da7-846d-000d03e17934","Type":"ContainerStarted","Data":"ea11a72a7023eff71e3a39a7361d695a5ccaa0fcb10fcd6c2f718b652020dbde"} Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.451245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" event={"ID":"5eedd2f2-bb50-4da7-846d-000d03e17934","Type":"ContainerStarted","Data":"1416ee058f7487da5f68286e0a593d82a9285cc753ee0d521fd38f5ed44291d9"} Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.472563 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.478738 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" podStartSLOduration=2.013393934 podStartE2EDuration="2.478716157s" podCreationTimestamp="2025-12-05 19:40:28 +0000 UTC" firstStartedPulling="2025-12-05 19:40:29.452335211 +0000 UTC m=+1608.334221216" lastFinishedPulling="2025-12-05 19:40:29.917657434 +0000 UTC m=+1608.799543439" observedRunningTime="2025-12-05 19:40:30.477552998 +0000 UTC m=+1609.359438993" watchObservedRunningTime="2025-12-05 19:40:30.478716157 +0000 UTC m=+1609.360602152" 
Dec 05 19:40:30 crc kubenswrapper[4982]: I1205 19:40:30.608743 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:31 crc kubenswrapper[4982]: I1205 19:40:31.226947 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:32 crc kubenswrapper[4982]: I1205 19:40:32.471334 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-wz6vt" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="registry-server" containerID="cri-o://f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127" gracePeriod=2 Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.065448 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.175018 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7b99s\" (UniqueName: \"kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s\") pod \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.175057 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content\") pod \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.175231 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities\") pod \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\" (UID: \"269a87e4-916c-4fbc-a087-ffcb85f9dfd1\") " Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.175923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities" (OuterVolumeSpecName: "utilities") pod "269a87e4-916c-4fbc-a087-ffcb85f9dfd1" (UID: "269a87e4-916c-4fbc-a087-ffcb85f9dfd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.185488 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s" (OuterVolumeSpecName: "kube-api-access-7b99s") pod "269a87e4-916c-4fbc-a087-ffcb85f9dfd1" (UID: "269a87e4-916c-4fbc-a087-ffcb85f9dfd1"). InnerVolumeSpecName "kube-api-access-7b99s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.238226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "269a87e4-916c-4fbc-a087-ffcb85f9dfd1" (UID: "269a87e4-916c-4fbc-a087-ffcb85f9dfd1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.278051 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7b99s\" (UniqueName: \"kubernetes.io/projected/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-kube-api-access-7b99s\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.278088 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.278099 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/269a87e4-916c-4fbc-a087-ffcb85f9dfd1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.340263 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.497965 4982 generic.go:334] "Generic (PLEG): container finished" podID="5eedd2f2-bb50-4da7-846d-000d03e17934" containerID="ea11a72a7023eff71e3a39a7361d695a5ccaa0fcb10fcd6c2f718b652020dbde" exitCode=0 Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.498406 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" event={"ID":"5eedd2f2-bb50-4da7-846d-000d03e17934","Type":"ContainerDied","Data":"ea11a72a7023eff71e3a39a7361d695a5ccaa0fcb10fcd6c2f718b652020dbde"} Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.503445 4982 generic.go:334] "Generic (PLEG): container finished" podID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerID="f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127" exitCode=0 Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.503492 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerDied","Data":"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127"} Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.503522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-wz6vt" event={"ID":"269a87e4-916c-4fbc-a087-ffcb85f9dfd1","Type":"ContainerDied","Data":"f4619fd50de04b2b23ddc3156e93d7d4664fccd13dae17737e497d9286548b92"} Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.503543 4982 scope.go:117] "RemoveContainer" containerID="f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.503718 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-wz6vt" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.563990 4982 scope.go:117] "RemoveContainer" containerID="0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.658313 4982 scope.go:117] "RemoveContainer" containerID="2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.671443 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.699642 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-wz6vt"] Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.710340 4982 scope.go:117] "RemoveContainer" containerID="f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127" Dec 05 19:40:33 crc kubenswrapper[4982]: E1205 19:40:33.713272 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127\": container with ID starting with f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127 not found: ID does not exist" containerID="f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.713311 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127"} err="failed to get container status \"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127\": rpc error: code = NotFound desc = could not find container \"f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127\": container with ID starting with f14851f534ced30705b23621efdcb4f04722d8db9063bfb3a118d2b1f87f6127 not found: ID does not exist" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.713333 4982 scope.go:117] "RemoveContainer" containerID="0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605" Dec 05 19:40:33 crc kubenswrapper[4982]: E1205 19:40:33.718609 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605\": container with ID starting with 0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605 not found: ID does not exist" containerID="0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.718637 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605"} err="failed to get container status \"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605\": rpc error: code = NotFound desc = could not find container \"0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605\": container with ID starting with 0562e875165c6b81f485355e04c1557e0a06c3fa4cb922bb8d2f7f1c5006c605 not found: ID does not exist" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.718655 4982 scope.go:117] "RemoveContainer" containerID="2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1" Dec 05 19:40:33 crc kubenswrapper[4982]: E1205 19:40:33.725441 4982 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1\": container with ID starting with 2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1 not found: ID does not exist" containerID="2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1" Dec 05 19:40:33 crc kubenswrapper[4982]: I1205 19:40:33.725504 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1"} err="failed to get container status \"2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1\": rpc error: code = NotFound desc = could not find container \"2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1\": container with ID starting with 2fa00262dbb24816a59f331c0e6f8edd564f679994fb1354e46ebf9b219b07a1 not found: ID does not exist" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.030201 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.138003 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory\") pod \"5eedd2f2-bb50-4da7-846d-000d03e17934\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.138054 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key\") pod \"5eedd2f2-bb50-4da7-846d-000d03e17934\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.138109 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpmqj\" (UniqueName: \"kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj\") pod \"5eedd2f2-bb50-4da7-846d-000d03e17934\" (UID: \"5eedd2f2-bb50-4da7-846d-000d03e17934\") " Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.143864 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj" (OuterVolumeSpecName: "kube-api-access-kpmqj") pod "5eedd2f2-bb50-4da7-846d-000d03e17934" (UID: "5eedd2f2-bb50-4da7-846d-000d03e17934"). InnerVolumeSpecName "kube-api-access-kpmqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.169402 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5eedd2f2-bb50-4da7-846d-000d03e17934" (UID: "5eedd2f2-bb50-4da7-846d-000d03e17934"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.186506 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory" (OuterVolumeSpecName: "inventory") pod "5eedd2f2-bb50-4da7-846d-000d03e17934" (UID: "5eedd2f2-bb50-4da7-846d-000d03e17934"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.241034 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.241072 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5eedd2f2-bb50-4da7-846d-000d03e17934-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.241085 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpmqj\" (UniqueName: \"kubernetes.io/projected/5eedd2f2-bb50-4da7-846d-000d03e17934-kube-api-access-kpmqj\") on node \"crc\" DevicePath \"\"" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.405734 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" path="/var/lib/kubelet/pods/269a87e4-916c-4fbc-a087-ffcb85f9dfd1/volumes" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.538822 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" event={"ID":"5eedd2f2-bb50-4da7-846d-000d03e17934","Type":"ContainerDied","Data":"1416ee058f7487da5f68286e0a593d82a9285cc753ee0d521fd38f5ed44291d9"} Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.538866 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1416ee058f7487da5f68286e0a593d82a9285cc753ee0d521fd38f5ed44291d9" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.538881 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-lk6dp" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.625247 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt"] Dec 05 19:40:35 crc kubenswrapper[4982]: E1205 19:40:35.625708 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="registry-server" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.625728 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="registry-server" Dec 05 19:40:35 crc kubenswrapper[4982]: E1205 19:40:35.625751 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="extract-utilities" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.625760 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="extract-utilities" Dec 05 19:40:35 crc kubenswrapper[4982]: E1205 19:40:35.625778 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="extract-content" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.625788 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="extract-content" Dec 05 19:40:35 crc kubenswrapper[4982]: E1205 19:40:35.625823 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5eedd2f2-bb50-4da7-846d-000d03e17934" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.625834 4982 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="5eedd2f2-bb50-4da7-846d-000d03e17934" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.627839 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="269a87e4-916c-4fbc-a087-ffcb85f9dfd1" containerName="registry-server" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.627861 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="5eedd2f2-bb50-4da7-846d-000d03e17934" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.628550 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.631692 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.631874 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.631880 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.633311 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.638574 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt"] Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.751726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.751819 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.751860 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.752031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5bdt\" (UniqueName: \"kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 
19:40:35.853873 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.853945 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.854006 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.854778 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5bdt\" (UniqueName: \"kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.857333 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.857538 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.859575 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.886120 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5bdt\" (UniqueName: \"kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:35 crc kubenswrapper[4982]: I1205 19:40:35.948061 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:40:36 crc kubenswrapper[4982]: I1205 19:40:36.538478 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt"] Dec 05 19:40:37 crc kubenswrapper[4982]: I1205 19:40:37.561836 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" event={"ID":"3cd634f3-b987-404a-a10b-609341e2b548","Type":"ContainerStarted","Data":"4a26642636b616b013a0eea4965fe465c960a540994c42ea4db7ddfc7d5622a0"} Dec 05 19:40:37 crc kubenswrapper[4982]: I1205 19:40:37.562546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" event={"ID":"3cd634f3-b987-404a-a10b-609341e2b548","Type":"ContainerStarted","Data":"559808d278444a0c6ea1eec621d7ed3355c21bc92a459510575edc136bd602ed"} Dec 05 19:40:37 crc kubenswrapper[4982]: I1205 19:40:37.614286 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" podStartSLOduration=2.183393422 podStartE2EDuration="2.614265087s" podCreationTimestamp="2025-12-05 19:40:35 +0000 UTC" firstStartedPulling="2025-12-05 19:40:36.555836165 +0000 UTC m=+1615.437722170" lastFinishedPulling="2025-12-05 19:40:36.98670782 +0000 UTC m=+1615.868593835" observedRunningTime="2025-12-05 19:40:37.578459566 +0000 UTC m=+1616.460345571" watchObservedRunningTime="2025-12-05 19:40:37.614265087 +0000 UTC m=+1616.496151092" Dec 05 19:40:42 crc kubenswrapper[4982]: I1205 19:40:42.557624 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:40:42 crc kubenswrapper[4982]: I1205 19:40:42.558191 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:40:42 crc kubenswrapper[4982]: I1205 19:40:42.558240 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:40:42 crc kubenswrapper[4982]: I1205 19:40:42.559065 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:40:42 crc kubenswrapper[4982]: I1205 19:40:42.559132 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" gracePeriod=600 Dec 05 19:40:42 crc kubenswrapper[4982]: E1205 19:40:42.680732 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:40:43 crc kubenswrapper[4982]: I1205 19:40:43.643290 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" exitCode=0 Dec 05 19:40:43 crc kubenswrapper[4982]: I1205 19:40:43.643358 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313"} Dec 05 19:40:43 crc kubenswrapper[4982]: I1205 19:40:43.643407 4982 scope.go:117] "RemoveContainer" containerID="d120ed4dd81891b8a49ab0c0d10c2698410eb2dc25101b25c8e0a67336b5afef" Dec 05 19:40:43 crc kubenswrapper[4982]: I1205 19:40:43.644490 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:40:43 crc kubenswrapper[4982]: E1205 19:40:43.645047 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.326949 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.331628 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.373129 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.410527 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.410788 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mwt9\" (UniqueName: \"kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.410849 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.512379 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.512487 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mwt9\" (UniqueName: \"kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.512516 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.512967 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.513016 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.530841 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-4mwt9\" (UniqueName: \"kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9\") pod \"certified-operators-qjrhb\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:47 crc kubenswrapper[4982]: I1205 19:40:47.655932 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.177262 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.227064 4982 scope.go:117] "RemoveContainer" containerID="31449d61eb9812aaa9e01f33f8963cdbf64b5b3447951531b48886b715d5ac59" Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.288925 4982 scope.go:117] "RemoveContainer" containerID="a7f35ea5929175b17a3da02549a1fb7d078007dde24f98ca3dab4a52263be641" Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.325446 4982 scope.go:117] "RemoveContainer" containerID="d016887d0517c40d737dc3916a72583b36e50fdb88d1d2d3fa9ac22427453173" Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.718485 4982 generic.go:334] "Generic (PLEG): container finished" podID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerID="a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828" exitCode=0 Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.718715 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerDied","Data":"a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828"} Dec 05 19:40:48 crc kubenswrapper[4982]: I1205 19:40:48.718820 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerStarted","Data":"25f19280fb746b86b6fb25a9f37bb40afcf3b5a4a8227726a96e641b6b275c52"} Dec 05 19:40:49 crc kubenswrapper[4982]: I1205 19:40:49.736911 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerStarted","Data":"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93"} Dec 05 19:40:50 crc kubenswrapper[4982]: I1205 19:40:50.750622 4982 generic.go:334] "Generic (PLEG): container finished" podID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerID="ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93" exitCode=0 Dec 05 19:40:50 crc kubenswrapper[4982]: I1205 19:40:50.750683 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerDied","Data":"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93"} Dec 05 19:40:50 crc kubenswrapper[4982]: I1205 19:40:50.753268 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:40:51 crc kubenswrapper[4982]: I1205 19:40:51.762828 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerStarted","Data":"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867"} Dec 05 19:40:51 crc kubenswrapper[4982]: I1205 
19:40:51.784086 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qjrhb" podStartSLOduration=2.350184145 podStartE2EDuration="4.784069336s" podCreationTimestamp="2025-12-05 19:40:47 +0000 UTC" firstStartedPulling="2025-12-05 19:40:48.722394443 +0000 UTC m=+1627.604280438" lastFinishedPulling="2025-12-05 19:40:51.156279624 +0000 UTC m=+1630.038165629" observedRunningTime="2025-12-05 19:40:51.778994509 +0000 UTC m=+1630.660880514" watchObservedRunningTime="2025-12-05 19:40:51.784069336 +0000 UTC m=+1630.665955331" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.390368 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:40:57 crc kubenswrapper[4982]: E1205 19:40:57.391313 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.657189 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.657592 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.705329 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.879549 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:40:57 crc kubenswrapper[4982]: I1205 19:40:57.956110 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:40:59 crc kubenswrapper[4982]: I1205 19:40:59.860811 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qjrhb" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="registry-server" containerID="cri-o://5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867" gracePeriod=2 Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.435706 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.591019 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content\") pod \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.591246 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mwt9\" (UniqueName: \"kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9\") pod \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.591648 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities\") pod \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\" (UID: \"cd2fcfdd-6d28-4db4-a71e-67da9b228bac\") " Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.593226 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities" (OuterVolumeSpecName: "utilities") pod "cd2fcfdd-6d28-4db4-a71e-67da9b228bac" (UID: "cd2fcfdd-6d28-4db4-a71e-67da9b228bac"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.598519 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9" (OuterVolumeSpecName: "kube-api-access-4mwt9") pod "cd2fcfdd-6d28-4db4-a71e-67da9b228bac" (UID: "cd2fcfdd-6d28-4db4-a71e-67da9b228bac"). InnerVolumeSpecName "kube-api-access-4mwt9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.644914 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd2fcfdd-6d28-4db4-a71e-67da9b228bac" (UID: "cd2fcfdd-6d28-4db4-a71e-67da9b228bac"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.694508 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.694545 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.694558 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mwt9\" (UniqueName: \"kubernetes.io/projected/cd2fcfdd-6d28-4db4-a71e-67da9b228bac-kube-api-access-4mwt9\") on node \"crc\" DevicePath \"\"" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.875032 4982 generic.go:334] "Generic (PLEG): container finished" podID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerID="5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867" exitCode=0 Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.875081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerDied","Data":"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867"} Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.875112 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qjrhb" event={"ID":"cd2fcfdd-6d28-4db4-a71e-67da9b228bac","Type":"ContainerDied","Data":"25f19280fb746b86b6fb25a9f37bb40afcf3b5a4a8227726a96e641b6b275c52"} Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.875134 4982 scope.go:117] "RemoveContainer" containerID="5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.875300 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-qjrhb" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.913830 4982 scope.go:117] "RemoveContainer" containerID="ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93" Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.913995 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.926841 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qjrhb"] Dec 05 19:41:00 crc kubenswrapper[4982]: I1205 19:41:00.952705 4982 scope.go:117] "RemoveContainer" containerID="a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.003314 4982 scope.go:117] "RemoveContainer" containerID="5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867" Dec 05 19:41:01 crc kubenswrapper[4982]: E1205 19:41:01.004874 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867\": container with ID starting with 5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867 not found: ID does not exist" containerID="5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.004938 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867"} err="failed to get container status \"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867\": rpc error: code = NotFound desc = could not find container \"5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867\": container with ID starting with 5beba5f783b93efef21de760da5165d4347a3b05c0baf699095c9a0434696867 not found: ID does not exist" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.004993 4982 scope.go:117] "RemoveContainer" containerID="ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93" Dec 05 19:41:01 crc kubenswrapper[4982]: E1205 19:41:01.005645 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93\": container with ID starting with ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93 not found: ID does not exist" containerID="ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.005678 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93"} err="failed to get container status \"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93\": rpc error: code = NotFound desc = could not find container \"ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93\": container with ID starting with ac802b857342e0489e05bb3e31bbe32c9c34f37c329ad77322bf88343d558f93 not found: ID does not exist" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.005702 4982 scope.go:117] "RemoveContainer" containerID="a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828" Dec 05 19:41:01 crc kubenswrapper[4982]: E1205 19:41:01.005979 4982 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828\": container with ID starting with a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828 not found: ID does not exist" containerID="a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.006012 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828"} err="failed to get container status \"a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828\": rpc error: code = NotFound desc = could not find container \"a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828\": container with ID starting with a034286e21e959456847d2f9bb09cf02aafd2f69f36fd1424d26bdded06d4828 not found: ID does not exist" Dec 05 19:41:01 crc kubenswrapper[4982]: I1205 19:41:01.412320 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" path="/var/lib/kubelet/pods/cd2fcfdd-6d28-4db4-a71e-67da9b228bac/volumes" Dec 05 19:41:10 crc kubenswrapper[4982]: I1205 19:41:10.391131 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:41:10 crc kubenswrapper[4982]: E1205 19:41:10.392091 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:41:25 crc kubenswrapper[4982]: I1205 19:41:25.400769 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:41:25 crc kubenswrapper[4982]: E1205 19:41:25.401709 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:41:40 crc kubenswrapper[4982]: I1205 19:41:40.391322 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:41:40 crc kubenswrapper[4982]: E1205 19:41:40.392573 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:41:48 crc kubenswrapper[4982]: I1205 19:41:48.508565 4982 scope.go:117] "RemoveContainer" containerID="3b80ddac6063bb6f87b2ba84d52d005d5acae4b4ba923c8336963ddccccc3f68" Dec 05 19:41:55 crc kubenswrapper[4982]: I1205 19:41:55.390838 4982 scope.go:117] "RemoveContainer" 
containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:41:55 crc kubenswrapper[4982]: E1205 19:41:55.394348 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:42:09 crc kubenswrapper[4982]: I1205 19:42:09.390166 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:42:09 crc kubenswrapper[4982]: E1205 19:42:09.390925 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:42:20 crc kubenswrapper[4982]: I1205 19:42:20.391009 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:42:20 crc kubenswrapper[4982]: E1205 19:42:20.392262 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:42:35 crc kubenswrapper[4982]: I1205 19:42:35.396736 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:42:35 crc kubenswrapper[4982]: E1205 19:42:35.401266 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:42:48 crc kubenswrapper[4982]: I1205 19:42:48.610005 4982 scope.go:117] "RemoveContainer" containerID="e8a6fd8175e38f99a4a2830096119f3de1dd95038840e6da5af5ca0f93cdf1b7" Dec 05 19:42:48 crc kubenswrapper[4982]: I1205 19:42:48.654899 4982 scope.go:117] "RemoveContainer" containerID="01ae03145b267388793b89030fca5a97ed9a535ccf57f30bf43a4f557471276b" Dec 05 19:42:48 crc kubenswrapper[4982]: I1205 19:42:48.718103 4982 scope.go:117] "RemoveContainer" containerID="512ccba067ae54033baaff360d39aa1504dc05153ed0c35108c24328445c0909" Dec 05 19:42:48 crc kubenswrapper[4982]: I1205 19:42:48.749773 4982 scope.go:117] "RemoveContainer" containerID="3c5c6b38f7d8a96d87cf298d0d83c8935c9ef46e44a1d63d7de777817eda4e99" Dec 05 19:42:48 crc kubenswrapper[4982]: I1205 19:42:48.781423 4982 scope.go:117] "RemoveContainer" containerID="c0ddb2f449db60e1d95e0afea07e0839af1496fea3b5f68d3503163e2c7b1184" Dec 05 19:42:50 crc kubenswrapper[4982]: I1205 19:42:50.390846 4982 
scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:42:50 crc kubenswrapper[4982]: E1205 19:42:50.391407 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:01 crc kubenswrapper[4982]: I1205 19:43:01.415005 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:43:01 crc kubenswrapper[4982]: E1205 19:43:01.416023 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:13 crc kubenswrapper[4982]: I1205 19:43:13.390183 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:43:13 crc kubenswrapper[4982]: E1205 19:43:13.390899 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.125787 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:26 crc kubenswrapper[4982]: E1205 19:43:26.128366 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="extract-content" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.128471 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="extract-content" Dec 05 19:43:26 crc kubenswrapper[4982]: E1205 19:43:26.128575 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="registry-server" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.128645 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="registry-server" Dec 05 19:43:26 crc kubenswrapper[4982]: E1205 19:43:26.128718 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="extract-utilities" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.128777 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="extract-utilities" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.129018 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2fcfdd-6d28-4db4-a71e-67da9b228bac" containerName="registry-server" Dec 05 19:43:26 crc 
kubenswrapper[4982]: I1205 19:43:26.130648 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.148023 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.301380 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.301864 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqqqd\" (UniqueName: \"kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.302133 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.391831 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:43:26 crc kubenswrapper[4982]: E1205 19:43:26.392175 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.404174 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqqqd\" (UniqueName: \"kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.404278 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.404346 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.405376 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.405415 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.426264 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqqqd\" (UniqueName: \"kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd\") pod \"redhat-marketplace-7x64j\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.453813 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:26 crc kubenswrapper[4982]: I1205 19:43:26.904033 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:27 crc kubenswrapper[4982]: I1205 19:43:27.600774 4982 generic.go:334] "Generic (PLEG): container finished" podID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerID="4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118" exitCode=0 Dec 05 19:43:27 crc kubenswrapper[4982]: I1205 19:43:27.601081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerDied","Data":"4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118"} Dec 05 19:43:27 crc kubenswrapper[4982]: I1205 19:43:27.601117 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerStarted","Data":"509751fba4f79d2951d9bb309fdb8634233ce5b24d4b12411e5566b596fc97d5"} Dec 05 19:43:29 crc kubenswrapper[4982]: I1205 19:43:29.619418 4982 generic.go:334] "Generic (PLEG): container finished" podID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerID="a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8" exitCode=0 Dec 05 19:43:29 crc kubenswrapper[4982]: I1205 19:43:29.619982 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerDied","Data":"a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8"} Dec 05 19:43:30 crc kubenswrapper[4982]: I1205 19:43:30.635989 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerStarted","Data":"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30"} Dec 05 19:43:30 crc kubenswrapper[4982]: I1205 19:43:30.662796 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7x64j" podStartSLOduration=2.257626349 podStartE2EDuration="4.662776236s" podCreationTimestamp="2025-12-05 19:43:26 +0000 UTC" 
firstStartedPulling="2025-12-05 19:43:27.603748309 +0000 UTC m=+1786.485634304" lastFinishedPulling="2025-12-05 19:43:30.008898196 +0000 UTC m=+1788.890784191" observedRunningTime="2025-12-05 19:43:30.658287803 +0000 UTC m=+1789.540173808" watchObservedRunningTime="2025-12-05 19:43:30.662776236 +0000 UTC m=+1789.544662241" Dec 05 19:43:36 crc kubenswrapper[4982]: I1205 19:43:36.454862 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:36 crc kubenswrapper[4982]: I1205 19:43:36.455460 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:36 crc kubenswrapper[4982]: I1205 19:43:36.544588 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:36 crc kubenswrapper[4982]: I1205 19:43:36.792006 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:36 crc kubenswrapper[4982]: I1205 19:43:36.840555 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:37 crc kubenswrapper[4982]: I1205 19:43:37.390090 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:43:37 crc kubenswrapper[4982]: E1205 19:43:37.390549 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:38 crc kubenswrapper[4982]: I1205 19:43:38.768290 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7x64j" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="registry-server" containerID="cri-o://a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30" gracePeriod=2 Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.298220 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.378537 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content\") pod \"f5c4e08e-42b0-4901-98e9-980c9f54b060\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.378693 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities\") pod \"f5c4e08e-42b0-4901-98e9-980c9f54b060\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.378748 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqqqd\" (UniqueName: \"kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd\") pod \"f5c4e08e-42b0-4901-98e9-980c9f54b060\" (UID: \"f5c4e08e-42b0-4901-98e9-980c9f54b060\") " Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.379968 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities" (OuterVolumeSpecName: "utilities") pod "f5c4e08e-42b0-4901-98e9-980c9f54b060" (UID: "f5c4e08e-42b0-4901-98e9-980c9f54b060"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.384468 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd" (OuterVolumeSpecName: "kube-api-access-rqqqd") pod "f5c4e08e-42b0-4901-98e9-980c9f54b060" (UID: "f5c4e08e-42b0-4901-98e9-980c9f54b060"). InnerVolumeSpecName "kube-api-access-rqqqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.399172 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5c4e08e-42b0-4901-98e9-980c9f54b060" (UID: "f5c4e08e-42b0-4901-98e9-980c9f54b060"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.481330 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.481366 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqqqd\" (UniqueName: \"kubernetes.io/projected/f5c4e08e-42b0-4901-98e9-980c9f54b060-kube-api-access-rqqqd\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.481379 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5c4e08e-42b0-4901-98e9-980c9f54b060-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.782601 4982 generic.go:334] "Generic (PLEG): container finished" podID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerID="a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30" exitCode=0 Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.782674 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7x64j" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.783329 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerDied","Data":"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30"} Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.783369 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7x64j" event={"ID":"f5c4e08e-42b0-4901-98e9-980c9f54b060","Type":"ContainerDied","Data":"509751fba4f79d2951d9bb309fdb8634233ce5b24d4b12411e5566b596fc97d5"} Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.783386 4982 scope.go:117] "RemoveContainer" containerID="a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.815866 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.825266 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7x64j"] Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.828040 4982 scope.go:117] "RemoveContainer" containerID="a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.861862 4982 scope.go:117] "RemoveContainer" containerID="4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.906053 4982 scope.go:117] "RemoveContainer" containerID="a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30" Dec 05 19:43:39 crc kubenswrapper[4982]: E1205 19:43:39.906585 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30\": container with ID starting with a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30 not found: ID does not exist" containerID="a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.906640 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30"} err="failed to get container status \"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30\": rpc error: code = NotFound desc = could not find container \"a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30\": container with ID starting with a6feee834ccc1cd868c215ffe74d216bcaeb6ea8a1315decae862d41d6aa3a30 not found: ID does not exist" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.906675 4982 scope.go:117] "RemoveContainer" containerID="a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8" Dec 05 19:43:39 crc kubenswrapper[4982]: E1205 19:43:39.907096 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8\": container with ID starting with a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8 not found: ID does not exist" containerID="a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.907138 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8"} err="failed to get container status \"a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8\": rpc error: code = NotFound desc = could not find container \"a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8\": container with ID starting with a295023f33c4b39b5e18b132de7031a120159101c25f4f5a64d3e8bfc64989c8 not found: ID does not exist" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.907193 4982 scope.go:117] "RemoveContainer" containerID="4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118" Dec 05 19:43:39 crc kubenswrapper[4982]: E1205 19:43:39.907504 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118\": container with ID starting with 4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118 not found: ID does not exist" containerID="4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118" Dec 05 19:43:39 crc kubenswrapper[4982]: I1205 19:43:39.907550 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118"} err="failed to get container status \"4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118\": rpc error: code = NotFound desc = could not find container \"4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118\": container with ID starting with 4de014732082ad40eaa5217043497e1a45c915bea613890cb21508c9e7aac118 not found: ID does not exist" Dec 05 19:43:41 crc kubenswrapper[4982]: I1205 19:43:41.404483 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" path="/var/lib/kubelet/pods/f5c4e08e-42b0-4901-98e9-980c9f54b060/volumes" Dec 05 19:43:48 crc kubenswrapper[4982]: I1205 19:43:48.857143 4982 scope.go:117] "RemoveContainer" containerID="2d25f9fe744dcb293634dbca72e4518e44069adf613bf28e627212a8945c9865" Dec 05 19:43:48 crc kubenswrapper[4982]: I1205 19:43:48.887463 4982 scope.go:117] "RemoveContainer" 
containerID="0f4346dfca4538237293c8f1ecb509ae836d07b88fb6c3a3919caded08e79ff4" Dec 05 19:43:52 crc kubenswrapper[4982]: I1205 19:43:52.389914 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:43:52 crc kubenswrapper[4982]: E1205 19:43:52.390671 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:43:54 crc kubenswrapper[4982]: I1205 19:43:54.968222 4982 generic.go:334] "Generic (PLEG): container finished" podID="3cd634f3-b987-404a-a10b-609341e2b548" containerID="4a26642636b616b013a0eea4965fe465c960a540994c42ea4db7ddfc7d5622a0" exitCode=0 Dec 05 19:43:54 crc kubenswrapper[4982]: I1205 19:43:54.968271 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" event={"ID":"3cd634f3-b987-404a-a10b-609341e2b548","Type":"ContainerDied","Data":"4a26642636b616b013a0eea4965fe465c960a540994c42ea4db7ddfc7d5622a0"} Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.478631 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.585387 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory\") pod \"3cd634f3-b987-404a-a10b-609341e2b548\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.585469 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key\") pod \"3cd634f3-b987-404a-a10b-609341e2b548\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.585589 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b5bdt\" (UniqueName: \"kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt\") pod \"3cd634f3-b987-404a-a10b-609341e2b548\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.585646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle\") pod \"3cd634f3-b987-404a-a10b-609341e2b548\" (UID: \"3cd634f3-b987-404a-a10b-609341e2b548\") " Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.591575 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3cd634f3-b987-404a-a10b-609341e2b548" (UID: "3cd634f3-b987-404a-a10b-609341e2b548"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.597390 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt" (OuterVolumeSpecName: "kube-api-access-b5bdt") pod "3cd634f3-b987-404a-a10b-609341e2b548" (UID: "3cd634f3-b987-404a-a10b-609341e2b548"). InnerVolumeSpecName "kube-api-access-b5bdt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.614564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3cd634f3-b987-404a-a10b-609341e2b548" (UID: "3cd634f3-b987-404a-a10b-609341e2b548"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.632255 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory" (OuterVolumeSpecName: "inventory") pod "3cd634f3-b987-404a-a10b-609341e2b548" (UID: "3cd634f3-b987-404a-a10b-609341e2b548"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.687907 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.687977 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.687998 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b5bdt\" (UniqueName: \"kubernetes.io/projected/3cd634f3-b987-404a-a10b-609341e2b548-kube-api-access-b5bdt\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.688022 4982 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3cd634f3-b987-404a-a10b-609341e2b548-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.994634 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" event={"ID":"3cd634f3-b987-404a-a10b-609341e2b548","Type":"ContainerDied","Data":"559808d278444a0c6ea1eec621d7ed3355c21bc92a459510575edc136bd602ed"} Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.994671 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="559808d278444a0c6ea1eec621d7ed3355c21bc92a459510575edc136bd602ed" Dec 05 19:43:56 crc kubenswrapper[4982]: I1205 19:43:56.994688 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.097072 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq"] Dec 05 19:43:57 crc kubenswrapper[4982]: E1205 19:43:57.097758 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="registry-server" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.097777 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="registry-server" Dec 05 19:43:57 crc kubenswrapper[4982]: E1205 19:43:57.097792 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="extract-utilities" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.097800 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="extract-utilities" Dec 05 19:43:57 crc kubenswrapper[4982]: E1205 19:43:57.097810 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cd634f3-b987-404a-a10b-609341e2b548" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.097817 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cd634f3-b987-404a-a10b-609341e2b548" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 19:43:57 crc kubenswrapper[4982]: E1205 19:43:57.097848 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="extract-content" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.097853 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="extract-content" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.098065 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5c4e08e-42b0-4901-98e9-980c9f54b060" containerName="registry-server" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.098079 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cd634f3-b987-404a-a10b-609341e2b548" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.098828 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.102068 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.102437 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.102605 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.103320 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.108240 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq"] Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.195229 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.195382 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.195483 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crcvb\" (UniqueName: \"kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.297358 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.297462 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.297501 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crcvb\" (UniqueName: \"kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.303534 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.303608 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.328765 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crcvb\" (UniqueName: \"kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:57 crc kubenswrapper[4982]: I1205 19:43:57.459315 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:43:58 crc kubenswrapper[4982]: I1205 19:43:58.007299 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq"] Dec 05 19:43:58 crc kubenswrapper[4982]: W1205 19:43:58.010464 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d9ae0b1_8af5_4522_af29_d67b2c829ca0.slice/crio-26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166 WatchSource:0}: Error finding container 26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166: Status 404 returned error can't find the container with id 26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166 Dec 05 19:43:59 crc kubenswrapper[4982]: I1205 19:43:59.017175 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" event={"ID":"2d9ae0b1-8af5-4522-af29-d67b2c829ca0","Type":"ContainerStarted","Data":"9c8cd9ea5e78ce17bc4f75732a3c82cd48380d4f54bce440ae8e80f82d6cfd09"} Dec 05 19:43:59 crc kubenswrapper[4982]: I1205 19:43:59.017589 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" event={"ID":"2d9ae0b1-8af5-4522-af29-d67b2c829ca0","Type":"ContainerStarted","Data":"26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166"} Dec 05 19:43:59 crc kubenswrapper[4982]: I1205 19:43:59.035698 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" podStartSLOduration=1.479543521 podStartE2EDuration="2.035684097s" podCreationTimestamp="2025-12-05 19:43:57 +0000 UTC" firstStartedPulling="2025-12-05 19:43:58.013366122 +0000 UTC m=+1816.895252107" lastFinishedPulling="2025-12-05 
19:43:58.569506668 +0000 UTC m=+1817.451392683" observedRunningTime="2025-12-05 19:43:59.031602945 +0000 UTC m=+1817.913488960" watchObservedRunningTime="2025-12-05 19:43:59.035684097 +0000 UTC m=+1817.917570092" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.062850 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-c3f9-account-create-update-vnhdf"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.077934 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-c3f9-account-create-update-vnhdf"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.089379 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-eb3d-account-create-update-6tb4h"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.102348 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-nnd82"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.114184 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-6xbcw"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.125644 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-56a8-account-create-update-cqrxv"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.135088 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-nq9zf"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.144811 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-eb3d-account-create-update-6tb4h"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.153460 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-6xbcw"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.162259 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-nnd82"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.174699 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-56a8-account-create-update-cqrxv"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.187732 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-nq9zf"] Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.414392 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="242b9e30-d7a8-4ea5-8cd4-e87471ddb18d" path="/var/lib/kubelet/pods/242b9e30-d7a8-4ea5-8cd4-e87471ddb18d/volumes" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.415825 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a5c0c12-1041-4859-94dd-8b5291a4f2ec" path="/var/lib/kubelet/pods/2a5c0c12-1041-4859-94dd-8b5291a4f2ec/volumes" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.416738 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a08add5-9ad7-41eb-abfb-7786db71c537" path="/var/lib/kubelet/pods/4a08add5-9ad7-41eb-abfb-7786db71c537/volumes" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.417672 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7180e9cd-c207-4b5f-984c-fff732e45b76" path="/var/lib/kubelet/pods/7180e9cd-c207-4b5f-984c-fff732e45b76/volumes" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.419494 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a" path="/var/lib/kubelet/pods/7e4b2e94-26ac-4689-96d5-ac1ba1d35e7a/volumes" Dec 05 19:44:05 crc kubenswrapper[4982]: I1205 19:44:05.420525 4982 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c27964fd-9fbf-40dd-b25a-91d715bd8ff4" path="/var/lib/kubelet/pods/c27964fd-9fbf-40dd-b25a-91d715bd8ff4/volumes" Dec 05 19:44:06 crc kubenswrapper[4982]: I1205 19:44:06.390788 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:44:06 crc kubenswrapper[4982]: E1205 19:44:06.391078 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:44:18 crc kubenswrapper[4982]: I1205 19:44:18.391284 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:44:18 crc kubenswrapper[4982]: E1205 19:44:18.393795 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.068461 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-7c96r"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.079707 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8572-account-create-update-ltmw6"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.094810 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-r4cm5"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.106048 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-r4cm5"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.115647 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-13aa-account-create-update-hl7v8"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.123993 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-n8vqc"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.132633 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-91af-account-create-update-dkk49"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.140761 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-36e3-account-create-update-nrbz9"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.148908 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-7c96r"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.157825 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-rhzlm"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.166211 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-13aa-account-create-update-hl7v8"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.175694 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-n8vqc"] Dec 05 19:44:31 crc 
kubenswrapper[4982]: I1205 19:44:31.184387 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-91af-account-create-update-dkk49"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.193850 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8572-account-create-update-ltmw6"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.202518 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-rhzlm"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.211426 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-36e3-account-create-update-nrbz9"] Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.404985 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a" path="/var/lib/kubelet/pods/25ae63c6-1c5e-4fa4-94e7-8a7800cf5f0a/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.405938 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31ed0f9e-af97-4842-8cbf-736e4d9d9a68" path="/var/lib/kubelet/pods/31ed0f9e-af97-4842-8cbf-736e4d9d9a68/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.406809 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="434a5cc4-eda4-40f7-a763-c4c61ba909fb" path="/var/lib/kubelet/pods/434a5cc4-eda4-40f7-a763-c4c61ba909fb/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.407495 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="505849cd-c790-4007-bea2-c6fef9b2fba4" path="/var/lib/kubelet/pods/505849cd-c790-4007-bea2-c6fef9b2fba4/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.408761 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7323378c-0ffc-47fa-88a2-666178163a52" path="/var/lib/kubelet/pods/7323378c-0ffc-47fa-88a2-666178163a52/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.409313 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bae30fe-731a-4097-a55f-11dd857f5986" path="/var/lib/kubelet/pods/8bae30fe-731a-4097-a55f-11dd857f5986/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.409917 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d61674f-fb7a-4bab-90ad-e4b6c22693ee" path="/var/lib/kubelet/pods/8d61674f-fb7a-4bab-90ad-e4b6c22693ee/volumes" Dec 05 19:44:31 crc kubenswrapper[4982]: I1205 19:44:31.410943 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af444583-8979-46e6-adc3-83cc5d6fbbcf" path="/var/lib/kubelet/pods/af444583-8979-46e6-adc3-83cc5d6fbbcf/volumes" Dec 05 19:44:32 crc kubenswrapper[4982]: I1205 19:44:32.393591 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:44:32 crc kubenswrapper[4982]: E1205 19:44:32.393826 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:44:35 crc kubenswrapper[4982]: I1205 19:44:35.046381 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-jbfm4"] Dec 05 19:44:35 crc kubenswrapper[4982]: I1205 
19:44:35.064037 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-jbfm4"] Dec 05 19:44:35 crc kubenswrapper[4982]: I1205 19:44:35.412823 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a404304e-f5bf-4931-8577-9161a96cfd8d" path="/var/lib/kubelet/pods/a404304e-f5bf-4931-8577-9161a96cfd8d/volumes" Dec 05 19:44:37 crc kubenswrapper[4982]: I1205 19:44:37.042431 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-dgbrk"] Dec 05 19:44:37 crc kubenswrapper[4982]: I1205 19:44:37.056686 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-dgbrk"] Dec 05 19:44:37 crc kubenswrapper[4982]: I1205 19:44:37.401853 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="666982dd-eefa-464c-ae19-7b0ffcabcf07" path="/var/lib/kubelet/pods/666982dd-eefa-464c-ae19-7b0ffcabcf07/volumes" Dec 05 19:44:45 crc kubenswrapper[4982]: I1205 19:44:45.391500 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:44:45 crc kubenswrapper[4982]: E1205 19:44:45.392743 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:44:48 crc kubenswrapper[4982]: I1205 19:44:48.963992 4982 scope.go:117] "RemoveContainer" containerID="53ad9660b7924a8d5e03468aa7c3fa1add97006f400009b56282cc49dade2881" Dec 05 19:44:48 crc kubenswrapper[4982]: I1205 19:44:48.990711 4982 scope.go:117] "RemoveContainer" containerID="b11d1b7382b2967e9292886664692c078e569976a36ef5dbf54831d3bd2b4c45" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.039408 4982 scope.go:117] "RemoveContainer" containerID="48c7db6740b1555fdbe7f1ad0652f97e3636d9a90b73b489e4c3a495e3f0fabd" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.062684 4982 scope.go:117] "RemoveContainer" containerID="d3b53ffe23ae5288747b97866c1d9d84c6132943c6d4ba016425db683b69f842" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.116035 4982 scope.go:117] "RemoveContainer" containerID="37f5cdca9ff9c30585429d7d35d8670e019e8ebf92231b63308bf8313e813a28" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.169588 4982 scope.go:117] "RemoveContainer" containerID="80398a6a190f0f1921c183268c9869f89478721ebe65239e1b99eb74d9fee4db" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.226072 4982 scope.go:117] "RemoveContainer" containerID="8fbe20548c3b138be12db51c8be0dc00a7c522fe0ce5e28d6a3cfb791c1db8a2" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.267676 4982 scope.go:117] "RemoveContainer" containerID="b8cd0c581a91dd6efd1b62716bf596fe34ba36e7d79f921c91c2d630a930c66c" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.288515 4982 scope.go:117] "RemoveContainer" containerID="8bf0b19bda090619d8454b2a11e60d57df8ec05560195a2cea2adb0f0af8c2db" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.311693 4982 scope.go:117] "RemoveContainer" containerID="e36a585e1075accbb098ec175960c4944f322f5cecba342c5e99846f508e6227" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.342331 4982 scope.go:117] "RemoveContainer" 
containerID="7cb1483c9751a0a187666105343480f7cbf8fdb1ccd8301135c2b226a4eaa9e2" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.374643 4982 scope.go:117] "RemoveContainer" containerID="a84c593018586b657355ff28d11cf5b16378847f5717389de75161039b7b9032" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.397986 4982 scope.go:117] "RemoveContainer" containerID="c9f5917405d962414ba72ec97595e839d7404cf30d52b21ed9fe8bd9f13120e9" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.433990 4982 scope.go:117] "RemoveContainer" containerID="a61367614156ba6f63ab4986bdefae65b372e34b00d50cc9ccfc6129241eb9e7" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.457122 4982 scope.go:117] "RemoveContainer" containerID="030c04ecc5e458fd10bc3ba028f2e77c8d47af7da3e26294152ca844c17d9d14" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.480267 4982 scope.go:117] "RemoveContainer" containerID="9ce49f359f6f70f701594d5208cc9cfc2a3bcc5d382dff6f897bf14d5b23a6bb" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.499941 4982 scope.go:117] "RemoveContainer" containerID="08c8bf3a282428c2f1ff8240a2d7cc9156faf8429d6bb67a63de7dff8e0c00d2" Dec 05 19:44:49 crc kubenswrapper[4982]: I1205 19:44:49.520764 4982 scope.go:117] "RemoveContainer" containerID="d967c09c0e8353e004d0c09cda710e7f769f1cec1b8c1baa1ed0bd128f029399" Dec 05 19:44:59 crc kubenswrapper[4982]: I1205 19:44:59.390700 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:44:59 crc kubenswrapper[4982]: E1205 19:44:59.391483 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.148701 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc"] Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.150487 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.152459 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.153251 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.159766 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc"] Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.238348 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.238461 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.238512 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbhdp\" (UniqueName: \"kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.340305 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.341594 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.341726 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbhdp\" (UniqueName: \"kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.342402 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume\") pod 
\"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.352768 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.375817 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbhdp\" (UniqueName: \"kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp\") pod \"collect-profiles-29416065-cdpjc\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.471575 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:00 crc kubenswrapper[4982]: I1205 19:45:00.960856 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc"] Dec 05 19:45:01 crc kubenswrapper[4982]: I1205 19:45:01.755544 4982 generic.go:334] "Generic (PLEG): container finished" podID="c44f0df7-bb79-4c72-976f-baa2ab421cc3" containerID="c303691403e8bba95198ed7f1390824b8be00d24dea877e23e0b6618ef409762" exitCode=0 Dec 05 19:45:01 crc kubenswrapper[4982]: I1205 19:45:01.755931 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" event={"ID":"c44f0df7-bb79-4c72-976f-baa2ab421cc3","Type":"ContainerDied","Data":"c303691403e8bba95198ed7f1390824b8be00d24dea877e23e0b6618ef409762"} Dec 05 19:45:01 crc kubenswrapper[4982]: I1205 19:45:01.755969 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" event={"ID":"c44f0df7-bb79-4c72-976f-baa2ab421cc3","Type":"ContainerStarted","Data":"ad391ff50a009b74d092a0cd0af1e658cd5d3c01826ea4116b8fcf221655eb01"} Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.198884 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.304496 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbhdp\" (UniqueName: \"kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp\") pod \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.304586 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume\") pod \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.304789 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume\") pod \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\" (UID: \"c44f0df7-bb79-4c72-976f-baa2ab421cc3\") " Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.305727 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume" (OuterVolumeSpecName: "config-volume") pod "c44f0df7-bb79-4c72-976f-baa2ab421cc3" (UID: "c44f0df7-bb79-4c72-976f-baa2ab421cc3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.310651 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c44f0df7-bb79-4c72-976f-baa2ab421cc3" (UID: "c44f0df7-bb79-4c72-976f-baa2ab421cc3"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.311285 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp" (OuterVolumeSpecName: "kube-api-access-dbhdp") pod "c44f0df7-bb79-4c72-976f-baa2ab421cc3" (UID: "c44f0df7-bb79-4c72-976f-baa2ab421cc3"). InnerVolumeSpecName "kube-api-access-dbhdp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.407040 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbhdp\" (UniqueName: \"kubernetes.io/projected/c44f0df7-bb79-4c72-976f-baa2ab421cc3-kube-api-access-dbhdp\") on node \"crc\" DevicePath \"\"" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.407078 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c44f0df7-bb79-4c72-976f-baa2ab421cc3-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.407087 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c44f0df7-bb79-4c72-976f-baa2ab421cc3-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.776614 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" event={"ID":"c44f0df7-bb79-4c72-976f-baa2ab421cc3","Type":"ContainerDied","Data":"ad391ff50a009b74d092a0cd0af1e658cd5d3c01826ea4116b8fcf221655eb01"} Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.777002 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad391ff50a009b74d092a0cd0af1e658cd5d3c01826ea4116b8fcf221655eb01" Dec 05 19:45:03 crc kubenswrapper[4982]: I1205 19:45:03.776669 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc" Dec 05 19:45:14 crc kubenswrapper[4982]: I1205 19:45:14.390076 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:45:14 crc kubenswrapper[4982]: E1205 19:45:14.391242 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:45:18 crc kubenswrapper[4982]: I1205 19:45:18.048879 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-nt8vq"] Dec 05 19:45:18 crc kubenswrapper[4982]: I1205 19:45:18.058579 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-nt8vq"] Dec 05 19:45:19 crc kubenswrapper[4982]: I1205 19:45:19.407343 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a107703-d667-459a-862f-9ba79f86042f" path="/var/lib/kubelet/pods/3a107703-d667-459a-862f-9ba79f86042f/volumes" Dec 05 19:45:27 crc kubenswrapper[4982]: I1205 19:45:27.390468 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:45:27 crc kubenswrapper[4982]: E1205 19:45:27.391398 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" 
Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.057276 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-k7wz7"] Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.069172 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-zz5w6"] Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.083045 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-k7wz7"] Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.091776 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-zz5w6"] Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.401211 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="075da87a-0ab9-462b-9435-5881b90bd9a3" path="/var/lib/kubelet/pods/075da87a-0ab9-462b-9435-5881b90bd9a3/volumes" Dec 05 19:45:29 crc kubenswrapper[4982]: I1205 19:45:29.401761 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b33cca72-9a2b-4042-b38e-3f01396d064c" path="/var/lib/kubelet/pods/b33cca72-9a2b-4042-b38e-3f01396d064c/volumes" Dec 05 19:45:38 crc kubenswrapper[4982]: I1205 19:45:38.390680 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:45:38 crc kubenswrapper[4982]: E1205 19:45:38.391450 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:45:46 crc kubenswrapper[4982]: I1205 19:45:46.030563 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-qb5jh"] Dec 05 19:45:46 crc kubenswrapper[4982]: I1205 19:45:46.042788 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-qb5jh"] Dec 05 19:45:47 crc kubenswrapper[4982]: I1205 19:45:47.037413 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-6vvtc"] Dec 05 19:45:47 crc kubenswrapper[4982]: I1205 19:45:47.050654 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-6vvtc"] Dec 05 19:45:47 crc kubenswrapper[4982]: I1205 19:45:47.404282 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="253ffb42-0135-4d3b-b21c-0810b4591a69" path="/var/lib/kubelet/pods/253ffb42-0135-4d3b-b21c-0810b4591a69/volumes" Dec 05 19:45:47 crc kubenswrapper[4982]: I1205 19:45:47.405276 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="641d839f-9ca5-4835-ba20-2c6981a00df3" path="/var/lib/kubelet/pods/641d839f-9ca5-4835-ba20-2c6981a00df3/volumes" Dec 05 19:45:49 crc kubenswrapper[4982]: I1205 19:45:49.883067 4982 scope.go:117] "RemoveContainer" containerID="13717ee3c226504c6bde18df5a3db4eee28b8af889897da70f9a7c99dca4fbe4" Dec 05 19:45:49 crc kubenswrapper[4982]: I1205 19:45:49.927238 4982 scope.go:117] "RemoveContainer" containerID="afc1350bbe3ae2c1afb759fb8b16c3a0f2a70d5a59529d9372f5f0fd325cc0a1" Dec 05 19:45:49 crc kubenswrapper[4982]: I1205 19:45:49.962128 4982 scope.go:117] "RemoveContainer" containerID="da898f364e9f895b46391b48d85e9de6ac79239ad0635ebb462fad8ea60dfe57" Dec 05 19:45:50 crc kubenswrapper[4982]: I1205 19:45:50.010848 4982 
scope.go:117] "RemoveContainer" containerID="d0364e692e0fe8dcc6576f50ff0a5d1b2769552586499e6cc15a11e6551b1070" Dec 05 19:45:50 crc kubenswrapper[4982]: I1205 19:45:50.067525 4982 scope.go:117] "RemoveContainer" containerID="3210ea976023a897587e17fc45685184e786eb8ce297c0e13bbe66dddad4d0d9" Dec 05 19:45:53 crc kubenswrapper[4982]: I1205 19:45:53.390999 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313" Dec 05 19:45:54 crc kubenswrapper[4982]: I1205 19:45:54.300889 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91"} Dec 05 19:46:05 crc kubenswrapper[4982]: I1205 19:46:05.426786 4982 generic.go:334] "Generic (PLEG): container finished" podID="2d9ae0b1-8af5-4522-af29-d67b2c829ca0" containerID="9c8cd9ea5e78ce17bc4f75732a3c82cd48380d4f54bce440ae8e80f82d6cfd09" exitCode=0 Dec 05 19:46:05 crc kubenswrapper[4982]: I1205 19:46:05.427241 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" event={"ID":"2d9ae0b1-8af5-4522-af29-d67b2c829ca0","Type":"ContainerDied","Data":"9c8cd9ea5e78ce17bc4f75732a3c82cd48380d4f54bce440ae8e80f82d6cfd09"} Dec 05 19:46:06 crc kubenswrapper[4982]: I1205 19:46:06.963112 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.061945 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key\") pod \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.062084 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crcvb\" (UniqueName: \"kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb\") pod \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.062134 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory\") pod \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\" (UID: \"2d9ae0b1-8af5-4522-af29-d67b2c829ca0\") " Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.069473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb" (OuterVolumeSpecName: "kube-api-access-crcvb") pod "2d9ae0b1-8af5-4522-af29-d67b2c829ca0" (UID: "2d9ae0b1-8af5-4522-af29-d67b2c829ca0"). InnerVolumeSpecName "kube-api-access-crcvb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.097126 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory" (OuterVolumeSpecName: "inventory") pod "2d9ae0b1-8af5-4522-af29-d67b2c829ca0" (UID: "2d9ae0b1-8af5-4522-af29-d67b2c829ca0"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.102352 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2d9ae0b1-8af5-4522-af29-d67b2c829ca0" (UID: "2d9ae0b1-8af5-4522-af29-d67b2c829ca0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.164920 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crcvb\" (UniqueName: \"kubernetes.io/projected/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-kube-api-access-crcvb\") on node \"crc\" DevicePath \"\"" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.164978 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.164988 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2d9ae0b1-8af5-4522-af29-d67b2c829ca0-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.445861 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" event={"ID":"2d9ae0b1-8af5-4522-af29-d67b2c829ca0","Type":"ContainerDied","Data":"26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166"} Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.445892 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.445895 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26fdb442dd6232cc6d1651e99c773b9b5a319111e61d330b89c3959772c1c166" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.538310 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4"] Dec 05 19:46:07 crc kubenswrapper[4982]: E1205 19:46:07.538843 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d9ae0b1-8af5-4522-af29-d67b2c829ca0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.538869 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d9ae0b1-8af5-4522-af29-d67b2c829ca0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 19:46:07 crc kubenswrapper[4982]: E1205 19:46:07.538903 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44f0df7-bb79-4c72-976f-baa2ab421cc3" containerName="collect-profiles" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.538915 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44f0df7-bb79-4c72-976f-baa2ab421cc3" containerName="collect-profiles" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.539233 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d9ae0b1-8af5-4522-af29-d67b2c829ca0" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.539259 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44f0df7-bb79-4c72-976f-baa2ab421cc3" containerName="collect-profiles" Dec 05 19:46:07 crc 
kubenswrapper[4982]: I1205 19:46:07.540249 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.544781 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.544900 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.545137 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.545444 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.548436 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4"] Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.673482 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ggxl\" (UniqueName: \"kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.673815 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.673986 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.775348 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ggxl\" (UniqueName: \"kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.775579 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.775759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"inventory\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.779523 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.780323 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.792514 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ggxl\" (UniqueName: \"kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:07 crc kubenswrapper[4982]: I1205 19:46:07.863070 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:46:08 crc kubenswrapper[4982]: I1205 19:46:08.461289 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:46:08 crc kubenswrapper[4982]: I1205 19:46:08.462771 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4"] Dec 05 19:46:09 crc kubenswrapper[4982]: I1205 19:46:09.467894 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" event={"ID":"27e277b5-5b4e-4d77-afbd-1b7c2d53918e","Type":"ContainerStarted","Data":"c6ceaf0044c028fd5c57f7529de4d237d4a7c9ddc83387b9571ec2bcf1e5e68e"} Dec 05 19:46:09 crc kubenswrapper[4982]: I1205 19:46:09.468521 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" event={"ID":"27e277b5-5b4e-4d77-afbd-1b7c2d53918e","Type":"ContainerStarted","Data":"55d179a630b5f22dc11a23bb09ee677718bc4e1923bab33158c00c6a4f75d60a"} Dec 05 19:46:09 crc kubenswrapper[4982]: I1205 19:46:09.486546 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" podStartSLOduration=2.033173684 podStartE2EDuration="2.486527226s" podCreationTimestamp="2025-12-05 19:46:07 +0000 UTC" firstStartedPulling="2025-12-05 19:46:08.460953939 +0000 UTC m=+1947.342839934" lastFinishedPulling="2025-12-05 19:46:08.914307471 +0000 UTC m=+1947.796193476" observedRunningTime="2025-12-05 19:46:09.482449133 +0000 UTC m=+1948.364335138" watchObservedRunningTime="2025-12-05 19:46:09.486527226 +0000 UTC m=+1948.368413221" Dec 05 19:46:50 crc 
kubenswrapper[4982]: I1205 19:46:50.038916 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-9d07-account-create-update-sn9k2"] Dec 05 19:46:50 crc kubenswrapper[4982]: I1205 19:46:50.047397 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-blhbk"] Dec 05 19:46:50 crc kubenswrapper[4982]: I1205 19:46:50.057626 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-9d07-account-create-update-sn9k2"] Dec 05 19:46:50 crc kubenswrapper[4982]: I1205 19:46:50.066245 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-blhbk"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.053747 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-tj2sp"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.072094 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-43bb-account-create-update-lt54d"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.084255 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f4b4-account-create-update-6r4nb"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.094051 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-tj2sp"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.101980 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-t884j"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.109765 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-43bb-account-create-update-lt54d"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.117849 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f4b4-account-create-update-6r4nb"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.126042 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-t884j"] Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.420109 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81d13df7-6d0f-4034-8186-f6f05e3f15c3" path="/var/lib/kubelet/pods/81d13df7-6d0f-4034-8186-f6f05e3f15c3/volumes" Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.422115 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86e5389c-d500-4b5e-a7f6-e070b2f64179" path="/var/lib/kubelet/pods/86e5389c-d500-4b5e-a7f6-e070b2f64179/volumes" Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.424059 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b70fe631-4513-4d09-9122-50730c5fe397" path="/var/lib/kubelet/pods/b70fe631-4513-4d09-9122-50730c5fe397/volumes" Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.425258 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb311798-8f2d-4a9a-92fa-3e72f0032912" path="/var/lib/kubelet/pods/cb311798-8f2d-4a9a-92fa-3e72f0032912/volumes" Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.427483 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d0b8f169-dd34-48eb-b55f-7bebeaac5abf" path="/var/lib/kubelet/pods/d0b8f169-dd34-48eb-b55f-7bebeaac5abf/volumes" Dec 05 19:46:51 crc kubenswrapper[4982]: I1205 19:46:51.428687 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2a57203-aa68-4b31-96ea-e522f4daf205" path="/var/lib/kubelet/pods/e2a57203-aa68-4b31-96ea-e522f4daf205/volumes" Dec 05 19:47:18 crc 
kubenswrapper[4982]: I1205 19:47:18.047799 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dpwvj"] Dec 05 19:47:18 crc kubenswrapper[4982]: I1205 19:47:18.058245 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-dpwvj"] Dec 05 19:47:19 crc kubenswrapper[4982]: I1205 19:47:19.405939 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="621c36ce-6cd3-4d73-8c51-c278a760bdad" path="/var/lib/kubelet/pods/621c36ce-6cd3-4d73-8c51-c278a760bdad/volumes" Dec 05 19:47:25 crc kubenswrapper[4982]: I1205 19:47:25.268884 4982 generic.go:334] "Generic (PLEG): container finished" podID="27e277b5-5b4e-4d77-afbd-1b7c2d53918e" containerID="c6ceaf0044c028fd5c57f7529de4d237d4a7c9ddc83387b9571ec2bcf1e5e68e" exitCode=0 Dec 05 19:47:25 crc kubenswrapper[4982]: I1205 19:47:25.268987 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" event={"ID":"27e277b5-5b4e-4d77-afbd-1b7c2d53918e","Type":"ContainerDied","Data":"c6ceaf0044c028fd5c57f7529de4d237d4a7c9ddc83387b9571ec2bcf1e5e68e"} Dec 05 19:47:26 crc kubenswrapper[4982]: I1205 19:47:26.794587 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:47:26 crc kubenswrapper[4982]: I1205 19:47:26.976840 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory\") pod \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " Dec 05 19:47:26 crc kubenswrapper[4982]: I1205 19:47:26.977281 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ggxl\" (UniqueName: \"kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl\") pod \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " Dec 05 19:47:26 crc kubenswrapper[4982]: I1205 19:47:26.977396 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key\") pod \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\" (UID: \"27e277b5-5b4e-4d77-afbd-1b7c2d53918e\") " Dec 05 19:47:26 crc kubenswrapper[4982]: I1205 19:47:26.989499 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl" (OuterVolumeSpecName: "kube-api-access-4ggxl") pod "27e277b5-5b4e-4d77-afbd-1b7c2d53918e" (UID: "27e277b5-5b4e-4d77-afbd-1b7c2d53918e"). InnerVolumeSpecName "kube-api-access-4ggxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.019175 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "27e277b5-5b4e-4d77-afbd-1b7c2d53918e" (UID: "27e277b5-5b4e-4d77-afbd-1b7c2d53918e"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.047776 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory" (OuterVolumeSpecName: "inventory") pod "27e277b5-5b4e-4d77-afbd-1b7c2d53918e" (UID: "27e277b5-5b4e-4d77-afbd-1b7c2d53918e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.080597 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ggxl\" (UniqueName: \"kubernetes.io/projected/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-kube-api-access-4ggxl\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.080627 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.080639 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/27e277b5-5b4e-4d77-afbd-1b7c2d53918e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.292933 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" event={"ID":"27e277b5-5b4e-4d77-afbd-1b7c2d53918e","Type":"ContainerDied","Data":"55d179a630b5f22dc11a23bb09ee677718bc4e1923bab33158c00c6a4f75d60a"} Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.292980 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="55d179a630b5f22dc11a23bb09ee677718bc4e1923bab33158c00c6a4f75d60a" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.292986 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.372294 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn"] Dec 05 19:47:27 crc kubenswrapper[4982]: E1205 19:47:27.372855 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27e277b5-5b4e-4d77-afbd-1b7c2d53918e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.372878 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="27e277b5-5b4e-4d77-afbd-1b7c2d53918e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.373097 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="27e277b5-5b4e-4d77-afbd-1b7c2d53918e" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.373897 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.375984 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.376040 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.376039 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.380886 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.404210 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn"] Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.489165 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.490571 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.490614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lzdsl\" (UniqueName: \"kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.593072 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.593196 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lzdsl\" (UniqueName: \"kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.593719 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.598318 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.600525 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.609708 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lzdsl\" (UniqueName: \"kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-b56qn\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:27 crc kubenswrapper[4982]: I1205 19:47:27.696458 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:28 crc kubenswrapper[4982]: I1205 19:47:28.269946 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn"] Dec 05 19:47:28 crc kubenswrapper[4982]: I1205 19:47:28.303713 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" event={"ID":"d26e25b0-3708-4c24-9034-36a8ab878465","Type":"ContainerStarted","Data":"f28717fc44da2663d16e934bce6b8ca32013c222473c564da0c770419883a442"} Dec 05 19:47:29 crc kubenswrapper[4982]: I1205 19:47:29.315960 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" event={"ID":"d26e25b0-3708-4c24-9034-36a8ab878465","Type":"ContainerStarted","Data":"7195cc1e4a7202cc724717d2e8dc332bd780454c73787ba21ff499a1839a46fd"} Dec 05 19:47:29 crc kubenswrapper[4982]: I1205 19:47:29.333015 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" podStartSLOduration=1.811448945 podStartE2EDuration="2.332994474s" podCreationTimestamp="2025-12-05 19:47:27 +0000 UTC" firstStartedPulling="2025-12-05 19:47:28.270479001 +0000 UTC m=+2027.152364996" lastFinishedPulling="2025-12-05 19:47:28.79202453 +0000 UTC m=+2027.673910525" observedRunningTime="2025-12-05 19:47:29.329201038 +0000 UTC m=+2028.211087033" watchObservedRunningTime="2025-12-05 19:47:29.332994474 +0000 UTC m=+2028.214880469" Dec 05 19:47:34 crc kubenswrapper[4982]: I1205 19:47:34.365002 4982 generic.go:334] "Generic (PLEG): container finished" podID="d26e25b0-3708-4c24-9034-36a8ab878465" containerID="7195cc1e4a7202cc724717d2e8dc332bd780454c73787ba21ff499a1839a46fd" exitCode=0 Dec 05 19:47:34 crc kubenswrapper[4982]: I1205 
19:47:34.365199 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" event={"ID":"d26e25b0-3708-4c24-9034-36a8ab878465","Type":"ContainerDied","Data":"7195cc1e4a7202cc724717d2e8dc332bd780454c73787ba21ff499a1839a46fd"} Dec 05 19:47:35 crc kubenswrapper[4982]: I1205 19:47:35.902786 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.087366 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzdsl\" (UniqueName: \"kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl\") pod \"d26e25b0-3708-4c24-9034-36a8ab878465\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.087431 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory\") pod \"d26e25b0-3708-4c24-9034-36a8ab878465\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.087671 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key\") pod \"d26e25b0-3708-4c24-9034-36a8ab878465\" (UID: \"d26e25b0-3708-4c24-9034-36a8ab878465\") " Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.093320 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl" (OuterVolumeSpecName: "kube-api-access-lzdsl") pod "d26e25b0-3708-4c24-9034-36a8ab878465" (UID: "d26e25b0-3708-4c24-9034-36a8ab878465"). InnerVolumeSpecName "kube-api-access-lzdsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.123347 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory" (OuterVolumeSpecName: "inventory") pod "d26e25b0-3708-4c24-9034-36a8ab878465" (UID: "d26e25b0-3708-4c24-9034-36a8ab878465"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.146037 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d26e25b0-3708-4c24-9034-36a8ab878465" (UID: "d26e25b0-3708-4c24-9034-36a8ab878465"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.190790 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.190839 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzdsl\" (UniqueName: \"kubernetes.io/projected/d26e25b0-3708-4c24-9034-36a8ab878465-kube-api-access-lzdsl\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.190863 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d26e25b0-3708-4c24-9034-36a8ab878465-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.390196 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" event={"ID":"d26e25b0-3708-4c24-9034-36a8ab878465","Type":"ContainerDied","Data":"f28717fc44da2663d16e934bce6b8ca32013c222473c564da0c770419883a442"} Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.390242 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f28717fc44da2663d16e934bce6b8ca32013c222473c564da0c770419883a442" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.390375 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-b56qn" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.590834 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj"] Dec 05 19:47:36 crc kubenswrapper[4982]: E1205 19:47:36.591898 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d26e25b0-3708-4c24-9034-36a8ab878465" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.591995 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d26e25b0-3708-4c24-9034-36a8ab878465" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.592489 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d26e25b0-3708-4c24-9034-36a8ab878465" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.593703 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.597590 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.597643 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.597684 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.597899 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.604367 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj"] Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.700797 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.700953 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.701069 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppcj2\" (UniqueName: \"kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.804019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppcj2\" (UniqueName: \"kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.804432 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.804530 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: 
\"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.809243 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.809692 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.827705 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppcj2\" (UniqueName: \"kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-vlqtj\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:36 crc kubenswrapper[4982]: I1205 19:47:36.915079 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:47:37 crc kubenswrapper[4982]: I1205 19:47:37.501037 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj"] Dec 05 19:47:38 crc kubenswrapper[4982]: I1205 19:47:38.409024 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" event={"ID":"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb","Type":"ContainerStarted","Data":"e893127233415cb5a930ce71a37ef4fc943f78346dafc3fb4217d90f8dac68c4"} Dec 05 19:47:38 crc kubenswrapper[4982]: I1205 19:47:38.409401 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" event={"ID":"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb","Type":"ContainerStarted","Data":"a2bed72ad70b4dc366135b4124f8047601b1ac556884c8801286cf29edd3e075"} Dec 05 19:47:38 crc kubenswrapper[4982]: I1205 19:47:38.435422 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" podStartSLOduration=1.871529249 podStartE2EDuration="2.435401992s" podCreationTimestamp="2025-12-05 19:47:36 +0000 UTC" firstStartedPulling="2025-12-05 19:47:37.508352683 +0000 UTC m=+2036.390238678" lastFinishedPulling="2025-12-05 19:47:38.072225416 +0000 UTC m=+2036.954111421" observedRunningTime="2025-12-05 19:47:38.423995313 +0000 UTC m=+2037.305881318" watchObservedRunningTime="2025-12-05 19:47:38.435401992 +0000 UTC m=+2037.317287987" Dec 05 19:47:42 crc kubenswrapper[4982]: I1205 19:47:42.096854 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-l2zcs"] Dec 05 19:47:42 crc kubenswrapper[4982]: I1205 19:47:42.106677 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-42bfv"] Dec 05 19:47:42 crc kubenswrapper[4982]: I1205 19:47:42.117172 4982 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/nova-cell0-cell-mapping-l2zcs"] Dec 05 19:47:42 crc kubenswrapper[4982]: I1205 19:47:42.127722 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-42bfv"] Dec 05 19:47:43 crc kubenswrapper[4982]: I1205 19:47:43.410624 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afa9e944-29e0-416e-8c19-f3b9786c8464" path="/var/lib/kubelet/pods/afa9e944-29e0-416e-8c19-f3b9786c8464/volumes" Dec 05 19:47:43 crc kubenswrapper[4982]: I1205 19:47:43.411873 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1fb8006-cfc6-412d-90be-9bb828949621" path="/var/lib/kubelet/pods/b1fb8006-cfc6-412d-90be-9bb828949621/volumes" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.226520 4982 scope.go:117] "RemoveContainer" containerID="11f17c71169e9f84bc8e277f6dd5d156260dabd71381c586fbdb857c18ca6dcd" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.252587 4982 scope.go:117] "RemoveContainer" containerID="e3088de0ff1ce25a8acb9bd7b737fbc2827ceb776a3890bd556fc75093141bc6" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.304860 4982 scope.go:117] "RemoveContainer" containerID="705c84edcdd57f073ad0a5a426a15b732a6d7cd93c9a2e89368fa4172af23652" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.367550 4982 scope.go:117] "RemoveContainer" containerID="b570e0285b09030ee506c6932baf9c8b048b87430c8f2360a02198de12621036" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.418788 4982 scope.go:117] "RemoveContainer" containerID="699747e4c64acab7ad2c54377703b1769ead6d2cdd15a0ac89736e29c1fb8aca" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.468421 4982 scope.go:117] "RemoveContainer" containerID="52855649d07f62bbab80802ca369507eb04bea20184fce99b1bf10363654a418" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.513768 4982 scope.go:117] "RemoveContainer" containerID="24864d8c2a496c525444d9d094c8553342163ec17200ded69e1df927a0a3f5cc" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.554264 4982 scope.go:117] "RemoveContainer" containerID="89ee36043206e58b6f3ad8b72b084f2c3567bac77a4a47185f596e654f8eea6e" Dec 05 19:47:50 crc kubenswrapper[4982]: I1205 19:47:50.571987 4982 scope.go:117] "RemoveContainer" containerID="8b1dcee64e05cb676a934e0168f923dfd361a873491f0b05eda5d10f9b9c0fd4" Dec 05 19:48:12 crc kubenswrapper[4982]: I1205 19:48:12.557722 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:48:12 crc kubenswrapper[4982]: I1205 19:48:12.558234 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:48:15 crc kubenswrapper[4982]: I1205 19:48:15.806823 4982 generic.go:334] "Generic (PLEG): container finished" podID="f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" containerID="e893127233415cb5a930ce71a37ef4fc943f78346dafc3fb4217d90f8dac68c4" exitCode=0 Dec 05 19:48:15 crc kubenswrapper[4982]: I1205 19:48:15.806905 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" 
event={"ID":"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb","Type":"ContainerDied","Data":"e893127233415cb5a930ce71a37ef4fc943f78346dafc3fb4217d90f8dac68c4"} Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.314053 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.436561 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory\") pod \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.437195 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppcj2\" (UniqueName: \"kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2\") pod \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.437532 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key\") pod \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\" (UID: \"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb\") " Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.441914 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2" (OuterVolumeSpecName: "kube-api-access-ppcj2") pod "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" (UID: "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb"). InnerVolumeSpecName "kube-api-access-ppcj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.464258 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" (UID: "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.474131 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory" (OuterVolumeSpecName: "inventory") pod "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" (UID: "f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.539944 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.539973 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppcj2\" (UniqueName: \"kubernetes.io/projected/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-kube-api-access-ppcj2\") on node \"crc\" DevicePath \"\"" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.539985 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.830840 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" event={"ID":"f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb","Type":"ContainerDied","Data":"a2bed72ad70b4dc366135b4124f8047601b1ac556884c8801286cf29edd3e075"} Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.830884 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-vlqtj" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.830896 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2bed72ad70b4dc366135b4124f8047601b1ac556884c8801286cf29edd3e075" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.957250 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl"] Dec 05 19:48:17 crc kubenswrapper[4982]: E1205 19:48:17.957942 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.957972 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.958360 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.959605 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.961495 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.962602 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.963316 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.965066 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:48:17 crc kubenswrapper[4982]: I1205 19:48:17.986897 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl"] Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.153540 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.153705 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcf6v\" (UniqueName: \"kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.153780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.256464 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.256619 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcf6v\" (UniqueName: \"kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.256662 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" 
(UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.263002 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.264292 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.281039 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcf6v\" (UniqueName: \"kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.293910 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.775508 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl"] Dec 05 19:48:18 crc kubenswrapper[4982]: I1205 19:48:18.841247 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" event={"ID":"9817cc48-c666-468c-a9cf-327fa1898ad9","Type":"ContainerStarted","Data":"4555577a8f346b218bad3ae4cf2aa3936d7c4280c223a797aae800a986b97e8e"} Dec 05 19:48:19 crc kubenswrapper[4982]: I1205 19:48:19.864077 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" event={"ID":"9817cc48-c666-468c-a9cf-327fa1898ad9","Type":"ContainerStarted","Data":"ea9098321a3f44ad3b8efbd5c246cfeaa7719e12040197e807a1e9d8eefef2bb"} Dec 05 19:48:19 crc kubenswrapper[4982]: I1205 19:48:19.885611 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" podStartSLOduration=2.46691165 podStartE2EDuration="2.885587143s" podCreationTimestamp="2025-12-05 19:48:17 +0000 UTC" firstStartedPulling="2025-12-05 19:48:18.784699976 +0000 UTC m=+2077.666585971" lastFinishedPulling="2025-12-05 19:48:19.203375429 +0000 UTC m=+2078.085261464" observedRunningTime="2025-12-05 19:48:19.884736871 +0000 UTC m=+2078.766622886" watchObservedRunningTime="2025-12-05 19:48:19.885587143 +0000 UTC m=+2078.767473138" Dec 05 19:48:28 crc kubenswrapper[4982]: I1205 19:48:28.066368 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvhmg"] Dec 05 19:48:28 crc kubenswrapper[4982]: I1205 19:48:28.079322 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-lvhmg"] Dec 05 19:48:29 crc kubenswrapper[4982]: I1205 19:48:29.404339 4982 
kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb75aca9-9973-41f0-8bf2-02a97b01f57f" path="/var/lib/kubelet/pods/cb75aca9-9973-41f0-8bf2-02a97b01f57f/volumes" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.125777 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.130353 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.141495 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.159636 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.159836 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.160132 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzh7r\" (UniqueName: \"kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.262240 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.262295 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.262463 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzh7r\" (UniqueName: \"kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.262834 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.262885 4982 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.284209 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzh7r\" (UniqueName: \"kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r\") pod \"redhat-operators-xsb5k\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.464422 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:40 crc kubenswrapper[4982]: I1205 19:48:40.938853 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:48:41 crc kubenswrapper[4982]: I1205 19:48:41.089658 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerStarted","Data":"3095b296029eadfc31f370f7cfc4d49cd01f400d853c2e6bacb8688ed6e2e112"} Dec 05 19:48:42 crc kubenswrapper[4982]: I1205 19:48:42.100455 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d6d1252-65df-4808-b619-095a5bac277f" containerID="51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f" exitCode=0 Dec 05 19:48:42 crc kubenswrapper[4982]: I1205 19:48:42.100518 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerDied","Data":"51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f"} Dec 05 19:48:42 crc kubenswrapper[4982]: I1205 19:48:42.556747 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:48:42 crc kubenswrapper[4982]: I1205 19:48:42.556799 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:48:44 crc kubenswrapper[4982]: I1205 19:48:44.122260 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerStarted","Data":"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24"} Dec 05 19:48:47 crc kubenswrapper[4982]: I1205 19:48:47.149048 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d6d1252-65df-4808-b619-095a5bac277f" containerID="59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24" exitCode=0 Dec 05 19:48:47 crc kubenswrapper[4982]: I1205 19:48:47.149190 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" 
event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerDied","Data":"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24"} Dec 05 19:48:48 crc kubenswrapper[4982]: I1205 19:48:48.164855 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerStarted","Data":"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06"} Dec 05 19:48:48 crc kubenswrapper[4982]: I1205 19:48:48.189649 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xsb5k" podStartSLOduration=2.707667545 podStartE2EDuration="8.189630258s" podCreationTimestamp="2025-12-05 19:48:40 +0000 UTC" firstStartedPulling="2025-12-05 19:48:42.102523074 +0000 UTC m=+2100.984409059" lastFinishedPulling="2025-12-05 19:48:47.584485777 +0000 UTC m=+2106.466371772" observedRunningTime="2025-12-05 19:48:48.181755868 +0000 UTC m=+2107.063641883" watchObservedRunningTime="2025-12-05 19:48:48.189630258 +0000 UTC m=+2107.071516253" Dec 05 19:48:50 crc kubenswrapper[4982]: I1205 19:48:50.465607 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:50 crc kubenswrapper[4982]: I1205 19:48:50.466839 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:48:50 crc kubenswrapper[4982]: I1205 19:48:50.730331 4982 scope.go:117] "RemoveContainer" containerID="1bfe36a950e35eb890eeab03f09d8d2abead59a0605cb899bc55c6b4e1ae3ff7" Dec 05 19:48:51 crc kubenswrapper[4982]: I1205 19:48:51.511994 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xsb5k" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="registry-server" probeResult="failure" output=< Dec 05 19:48:51 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 19:48:51 crc kubenswrapper[4982]: > Dec 05 19:49:00 crc kubenswrapper[4982]: I1205 19:49:00.509987 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:49:00 crc kubenswrapper[4982]: I1205 19:49:00.566396 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:49:00 crc kubenswrapper[4982]: I1205 19:49:00.745232 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.301045 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xsb5k" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="registry-server" containerID="cri-o://07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06" gracePeriod=2 Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.850040 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.962377 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzh7r\" (UniqueName: \"kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r\") pod \"8d6d1252-65df-4808-b619-095a5bac277f\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.962423 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content\") pod \"8d6d1252-65df-4808-b619-095a5bac277f\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.962598 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities\") pod \"8d6d1252-65df-4808-b619-095a5bac277f\" (UID: \"8d6d1252-65df-4808-b619-095a5bac277f\") " Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.963473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities" (OuterVolumeSpecName: "utilities") pod "8d6d1252-65df-4808-b619-095a5bac277f" (UID: "8d6d1252-65df-4808-b619-095a5bac277f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:49:02 crc kubenswrapper[4982]: I1205 19:49:02.972660 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r" (OuterVolumeSpecName: "kube-api-access-xzh7r") pod "8d6d1252-65df-4808-b619-095a5bac277f" (UID: "8d6d1252-65df-4808-b619-095a5bac277f"). InnerVolumeSpecName "kube-api-access-xzh7r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.065463 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.065521 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzh7r\" (UniqueName: \"kubernetes.io/projected/8d6d1252-65df-4808-b619-095a5bac277f-kube-api-access-xzh7r\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.090018 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d6d1252-65df-4808-b619-095a5bac277f" (UID: "8d6d1252-65df-4808-b619-095a5bac277f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.167517 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d6d1252-65df-4808-b619-095a5bac277f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.315084 4982 generic.go:334] "Generic (PLEG): container finished" podID="8d6d1252-65df-4808-b619-095a5bac277f" containerID="07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06" exitCode=0 Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.315138 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerDied","Data":"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06"} Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.315191 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xsb5k" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.315238 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xsb5k" event={"ID":"8d6d1252-65df-4808-b619-095a5bac277f","Type":"ContainerDied","Data":"3095b296029eadfc31f370f7cfc4d49cd01f400d853c2e6bacb8688ed6e2e112"} Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.315263 4982 scope.go:117] "RemoveContainer" containerID="07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.356858 4982 scope.go:117] "RemoveContainer" containerID="59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.368204 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.396332 4982 scope.go:117] "RemoveContainer" containerID="51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.444979 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xsb5k"] Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.593262 4982 scope.go:117] "RemoveContainer" containerID="07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06" Dec 05 19:49:03 crc kubenswrapper[4982]: E1205 19:49:03.593872 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06\": container with ID starting with 07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06 not found: ID does not exist" containerID="07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06" Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.594007 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06"} err="failed to get container status \"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06\": rpc error: code = NotFound desc = could not find container \"07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06\": container with ID starting with 07140bbb8c8895588e6673c3658ac256aacd63a8e9fd839b81fc2b4aad711d06 not found: ID does not exist" Dec 05 19:49:03 crc 
Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.594040 4982 scope.go:117] "RemoveContainer" containerID="59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24"
Dec 05 19:49:03 crc kubenswrapper[4982]: E1205 19:49:03.594676 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24\": container with ID starting with 59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24 not found: ID does not exist" containerID="59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24"
Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.594750 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24"} err="failed to get container status \"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24\": rpc error: code = NotFound desc = could not find container \"59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24\": container with ID starting with 59dc6f59f7ddc4190ff2ce83e7b266d507ab7743c121b1d2e2df8dcd3ebcce24 not found: ID does not exist"
Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.594827 4982 scope.go:117] "RemoveContainer" containerID="51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f"
Dec 05 19:49:03 crc kubenswrapper[4982]: E1205 19:49:03.604552 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f\": container with ID starting with 51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f not found: ID does not exist" containerID="51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f"
Dec 05 19:49:03 crc kubenswrapper[4982]: I1205 19:49:03.604638 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f"} err="failed to get container status \"51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f\": rpc error: code = NotFound desc = could not find container \"51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f\": container with ID starting with 51d15c1ba49cd0bc217731b584058f9887245b70151af20bc40382f97748d73f not found: ID does not exist"
Dec 05 19:49:05 crc kubenswrapper[4982]: I1205 19:49:05.407229 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d6d1252-65df-4808-b619-095a5bac277f" path="/var/lib/kubelet/pods/8d6d1252-65df-4808-b619-095a5bac277f/volumes"
Dec 05 19:49:12 crc kubenswrapper[4982]: I1205 19:49:12.556916 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:49:12 crc kubenswrapper[4982]: I1205 19:49:12.557971 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 19:49:12 crc kubenswrapper[4982]: I1205 19:49:12.558044 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 19:49:12 crc kubenswrapper[4982]: I1205 19:49:12.559598 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 19:49:12 crc kubenswrapper[4982]: I1205 19:49:12.559737 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91" gracePeriod=600
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.417891 4982 generic.go:334] "Generic (PLEG): container finished" podID="9817cc48-c666-468c-a9cf-327fa1898ad9" containerID="ea9098321a3f44ad3b8efbd5c246cfeaa7719e12040197e807a1e9d8eefef2bb" exitCode=0
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.417990 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" event={"ID":"9817cc48-c666-468c-a9cf-327fa1898ad9","Type":"ContainerDied","Data":"ea9098321a3f44ad3b8efbd5c246cfeaa7719e12040197e807a1e9d8eefef2bb"}
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.421022 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91" exitCode=0
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.421059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91"}
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.421081 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf"}
Dec 05 19:49:13 crc kubenswrapper[4982]: I1205 19:49:13.421097 4982 scope.go:117] "RemoveContainer" containerID="45ec533502192c8159f53bf01af7b1f8a2f2260d40983654d08cb997e4efa313"
Dec 05 19:49:14 crc kubenswrapper[4982]: I1205 19:49:14.940372 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl"
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.127277 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory\") pod \"9817cc48-c666-468c-a9cf-327fa1898ad9\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.127449 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key\") pod \"9817cc48-c666-468c-a9cf-327fa1898ad9\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.127480 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcf6v\" (UniqueName: \"kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v\") pod \"9817cc48-c666-468c-a9cf-327fa1898ad9\" (UID: \"9817cc48-c666-468c-a9cf-327fa1898ad9\") " Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.134798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v" (OuterVolumeSpecName: "kube-api-access-zcf6v") pod "9817cc48-c666-468c-a9cf-327fa1898ad9" (UID: "9817cc48-c666-468c-a9cf-327fa1898ad9"). InnerVolumeSpecName "kube-api-access-zcf6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.161611 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory" (OuterVolumeSpecName: "inventory") pod "9817cc48-c666-468c-a9cf-327fa1898ad9" (UID: "9817cc48-c666-468c-a9cf-327fa1898ad9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.173631 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9817cc48-c666-468c-a9cf-327fa1898ad9" (UID: "9817cc48-c666-468c-a9cf-327fa1898ad9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.230279 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.230603 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9817cc48-c666-468c-a9cf-327fa1898ad9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.230620 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcf6v\" (UniqueName: \"kubernetes.io/projected/9817cc48-c666-468c-a9cf-327fa1898ad9-kube-api-access-zcf6v\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.446911 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" event={"ID":"9817cc48-c666-468c-a9cf-327fa1898ad9","Type":"ContainerDied","Data":"4555577a8f346b218bad3ae4cf2aa3936d7c4280c223a797aae800a986b97e8e"} Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.446956 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4555577a8f346b218bad3ae4cf2aa3936d7c4280c223a797aae800a986b97e8e" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.446989 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.549832 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5zqdl"] Dec 05 19:49:15 crc kubenswrapper[4982]: E1205 19:49:15.550598 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="registry-server" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.550697 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="registry-server" Dec 05 19:49:15 crc kubenswrapper[4982]: E1205 19:49:15.550777 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9817cc48-c666-468c-a9cf-327fa1898ad9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.550877 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9817cc48-c666-468c-a9cf-327fa1898ad9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:15 crc kubenswrapper[4982]: E1205 19:49:15.550971 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="extract-utilities" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.551044 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="extract-utilities" Dec 05 19:49:15 crc kubenswrapper[4982]: E1205 19:49:15.551141 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="extract-content" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.551246 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="extract-content" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.551610 4982 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="8d6d1252-65df-4808-b619-095a5bac277f" containerName="registry-server" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.551723 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9817cc48-c666-468c-a9cf-327fa1898ad9" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.552699 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.557207 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.557595 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.557703 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.557915 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.566332 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5zqdl"] Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.638002 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.638095 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.638223 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kx2x\" (UniqueName: \"kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.740332 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kx2x\" (UniqueName: \"kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.740461 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " 
pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.740519 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.745835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.746216 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.758178 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kx2x\" (UniqueName: \"kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x\") pod \"ssh-known-hosts-edpm-deployment-5zqdl\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:15 crc kubenswrapper[4982]: I1205 19:49:15.872724 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:16 crc kubenswrapper[4982]: I1205 19:49:16.448847 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-5zqdl"] Dec 05 19:49:16 crc kubenswrapper[4982]: W1205 19:49:16.449547 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f38261e_9d6a_4983_873c_7e7cf37ebb81.slice/crio-fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de WatchSource:0}: Error finding container fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de: Status 404 returned error can't find the container with id fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de Dec 05 19:49:17 crc kubenswrapper[4982]: I1205 19:49:17.469498 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" event={"ID":"1f38261e-9d6a-4983-873c-7e7cf37ebb81","Type":"ContainerStarted","Data":"c0795a1caae3b4e0400308e8bd9bf5b2ff502a505e86297bd69a2a1b47bd7453"} Dec 05 19:49:17 crc kubenswrapper[4982]: I1205 19:49:17.470061 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" event={"ID":"1f38261e-9d6a-4983-873c-7e7cf37ebb81","Type":"ContainerStarted","Data":"fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de"} Dec 05 19:49:17 crc kubenswrapper[4982]: I1205 19:49:17.487801 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" podStartSLOduration=1.99551957 podStartE2EDuration="2.487779419s" podCreationTimestamp="2025-12-05 19:49:15 +0000 UTC" firstStartedPulling="2025-12-05 19:49:16.453025869 +0000 UTC m=+2135.334911864" lastFinishedPulling="2025-12-05 19:49:16.945285708 +0000 UTC m=+2135.827171713" observedRunningTime="2025-12-05 19:49:17.484935007 +0000 UTC m=+2136.366821012" watchObservedRunningTime="2025-12-05 19:49:17.487779419 +0000 UTC m=+2136.369665414" Dec 05 19:49:25 crc kubenswrapper[4982]: I1205 19:49:25.552256 4982 generic.go:334] "Generic (PLEG): container finished" podID="1f38261e-9d6a-4983-873c-7e7cf37ebb81" containerID="c0795a1caae3b4e0400308e8bd9bf5b2ff502a505e86297bd69a2a1b47bd7453" exitCode=0 Dec 05 19:49:25 crc kubenswrapper[4982]: I1205 19:49:25.552344 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" event={"ID":"1f38261e-9d6a-4983-873c-7e7cf37ebb81","Type":"ContainerDied","Data":"c0795a1caae3b4e0400308e8bd9bf5b2ff502a505e86297bd69a2a1b47bd7453"} Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.125224 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.184623 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9kx2x\" (UniqueName: \"kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x\") pod \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.184980 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0\") pod \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.185230 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam\") pod \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\" (UID: \"1f38261e-9d6a-4983-873c-7e7cf37ebb81\") " Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.191314 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x" (OuterVolumeSpecName: "kube-api-access-9kx2x") pod "1f38261e-9d6a-4983-873c-7e7cf37ebb81" (UID: "1f38261e-9d6a-4983-873c-7e7cf37ebb81"). InnerVolumeSpecName "kube-api-access-9kx2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.212723 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "1f38261e-9d6a-4983-873c-7e7cf37ebb81" (UID: "1f38261e-9d6a-4983-873c-7e7cf37ebb81"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.236049 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "1f38261e-9d6a-4983-873c-7e7cf37ebb81" (UID: "1f38261e-9d6a-4983-873c-7e7cf37ebb81"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.288667 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.288807 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9kx2x\" (UniqueName: \"kubernetes.io/projected/1f38261e-9d6a-4983-873c-7e7cf37ebb81-kube-api-access-9kx2x\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.288873 4982 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/1f38261e-9d6a-4983-873c-7e7cf37ebb81-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.572514 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" event={"ID":"1f38261e-9d6a-4983-873c-7e7cf37ebb81","Type":"ContainerDied","Data":"fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de"} Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.572841 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fddd9d83ab5d481020edc392dd95a981042b9d39e149dceaa244825c044ca8de" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.572594 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-5zqdl" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.659313 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld"] Dec 05 19:49:27 crc kubenswrapper[4982]: E1205 19:49:27.659769 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f38261e-9d6a-4983-873c-7e7cf37ebb81" containerName="ssh-known-hosts-edpm-deployment" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.659788 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f38261e-9d6a-4983-873c-7e7cf37ebb81" containerName="ssh-known-hosts-edpm-deployment" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.660052 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f38261e-9d6a-4983-873c-7e7cf37ebb81" containerName="ssh-known-hosts-edpm-deployment" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.660925 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.677019 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.677275 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.677409 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.677732 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld"] Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.678087 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.696739 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.696780 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.696812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5mtl\" (UniqueName: \"kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.798927 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.798992 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.799038 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5mtl\" (UniqueName: \"kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.803028 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.808761 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.830275 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5mtl\" (UniqueName: \"kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-x59ld\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:27 crc kubenswrapper[4982]: I1205 19:49:27.982229 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:28 crc kubenswrapper[4982]: I1205 19:49:28.487003 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld"] Dec 05 19:49:28 crc kubenswrapper[4982]: W1205 19:49:28.491293 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod96a0a8ec_a47f_4b4e_abf9_53e0f6a30c09.slice/crio-bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41 WatchSource:0}: Error finding container bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41: Status 404 returned error can't find the container with id bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41 Dec 05 19:49:28 crc kubenswrapper[4982]: I1205 19:49:28.585696 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" event={"ID":"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09","Type":"ContainerStarted","Data":"bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41"} Dec 05 19:49:29 crc kubenswrapper[4982]: I1205 19:49:29.596596 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" event={"ID":"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09","Type":"ContainerStarted","Data":"2abba616be49c0cbb374179e263e3fc5938536069688815cfd75e5ccd2bb2780"} Dec 05 19:49:29 crc kubenswrapper[4982]: I1205 19:49:29.632104 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" podStartSLOduration=2.221921139 podStartE2EDuration="2.632082875s" podCreationTimestamp="2025-12-05 19:49:27 +0000 UTC" firstStartedPulling="2025-12-05 19:49:28.492795738 +0000 UTC m=+2147.374681733" lastFinishedPulling="2025-12-05 19:49:28.902957464 +0000 UTC m=+2147.784843469" observedRunningTime="2025-12-05 19:49:29.616019148 +0000 UTC m=+2148.497905163" watchObservedRunningTime="2025-12-05 19:49:29.632082875 +0000 UTC 
m=+2148.513968890" Dec 05 19:49:38 crc kubenswrapper[4982]: I1205 19:49:38.692275 4982 generic.go:334] "Generic (PLEG): container finished" podID="96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" containerID="2abba616be49c0cbb374179e263e3fc5938536069688815cfd75e5ccd2bb2780" exitCode=0 Dec 05 19:49:38 crc kubenswrapper[4982]: I1205 19:49:38.692350 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" event={"ID":"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09","Type":"ContainerDied","Data":"2abba616be49c0cbb374179e263e3fc5938536069688815cfd75e5ccd2bb2780"} Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.237094 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.287463 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key\") pod \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.287623 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory\") pod \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.287712 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5mtl\" (UniqueName: \"kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl\") pod \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\" (UID: \"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09\") " Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.294512 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl" (OuterVolumeSpecName: "kube-api-access-s5mtl") pod "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" (UID: "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09"). InnerVolumeSpecName "kube-api-access-s5mtl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.318652 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" (UID: "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.320902 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory" (OuterVolumeSpecName: "inventory") pod "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" (UID: "96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.389510 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.389573 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.389587 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5mtl\" (UniqueName: \"kubernetes.io/projected/96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09-kube-api-access-s5mtl\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.713141 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" event={"ID":"96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09","Type":"ContainerDied","Data":"bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41"} Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.713201 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bab6385d4a422f45a091626222b0849d3608f1c3f9d7ddd1c7fa239f068dad41" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.713363 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-x59ld" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.804968 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864"] Dec 05 19:49:40 crc kubenswrapper[4982]: E1205 19:49:40.805638 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.805661 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.805942 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.807173 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.808877 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.809588 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.809645 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.809684 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.815644 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864"] Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.897236 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.897360 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:40 crc kubenswrapper[4982]: I1205 19:49:40.897649 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9gxg\" (UniqueName: \"kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.000449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.000580 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.000773 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9gxg\" (UniqueName: \"kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: 
\"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.005955 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.025870 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.034244 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9gxg\" (UniqueName: \"kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-cf864\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.124581 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.680423 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864"] Dec 05 19:49:41 crc kubenswrapper[4982]: I1205 19:49:41.725867 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" event={"ID":"c22ac692-8285-4e89-8c2b-28b2bc125fa3","Type":"ContainerStarted","Data":"da092f9c6c2448a55a0fdb37ce5c965f4a1514e5298cacdc6ba9e2a39011f5f2"} Dec 05 19:49:42 crc kubenswrapper[4982]: I1205 19:49:42.107342 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:49:42 crc kubenswrapper[4982]: I1205 19:49:42.733774 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" event={"ID":"c22ac692-8285-4e89-8c2b-28b2bc125fa3","Type":"ContainerStarted","Data":"922f624318474a4a95b0cbddf416427872cd02788c3a89a467b177c5ff9000f3"} Dec 05 19:49:42 crc kubenswrapper[4982]: I1205 19:49:42.752475 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" podStartSLOduration=2.331612771 podStartE2EDuration="2.752457858s" podCreationTimestamp="2025-12-05 19:49:40 +0000 UTC" firstStartedPulling="2025-12-05 19:49:41.682972691 +0000 UTC m=+2160.564858686" lastFinishedPulling="2025-12-05 19:49:42.103817778 +0000 UTC m=+2160.985703773" observedRunningTime="2025-12-05 19:49:42.747355699 +0000 UTC m=+2161.629241694" watchObservedRunningTime="2025-12-05 19:49:42.752457858 +0000 UTC m=+2161.634343853" Dec 05 19:49:48 crc kubenswrapper[4982]: I1205 19:49:48.055535 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-np8hc"] Dec 05 19:49:48 crc kubenswrapper[4982]: I1205 19:49:48.063641 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/cloudkitty-db-sync-np8hc"] Dec 05 19:49:49 crc kubenswrapper[4982]: I1205 19:49:49.410678 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac18c894-f60b-4db2-80a2-82f23f52f9a2" path="/var/lib/kubelet/pods/ac18c894-f60b-4db2-80a2-82f23f52f9a2/volumes" Dec 05 19:49:50 crc kubenswrapper[4982]: I1205 19:49:50.798788 4982 scope.go:117] "RemoveContainer" containerID="2ecf85a70d2d177d0dabfdc3dbfb1a2f883dd54d011427190c021ee75087c917" Dec 05 19:49:51 crc kubenswrapper[4982]: I1205 19:49:51.812888 4982 generic.go:334] "Generic (PLEG): container finished" podID="c22ac692-8285-4e89-8c2b-28b2bc125fa3" containerID="922f624318474a4a95b0cbddf416427872cd02788c3a89a467b177c5ff9000f3" exitCode=0 Dec 05 19:49:51 crc kubenswrapper[4982]: I1205 19:49:51.812964 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" event={"ID":"c22ac692-8285-4e89-8c2b-28b2bc125fa3","Type":"ContainerDied","Data":"922f624318474a4a95b0cbddf416427872cd02788c3a89a467b177c5ff9000f3"} Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.041623 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-lvfbw"] Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.052711 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-lvfbw"] Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.405212 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78479310-9820-4899-98ac-243473c53a62" path="/var/lib/kubelet/pods/78479310-9820-4899-98ac-243473c53a62/volumes" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.452407 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.473759 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory\") pod \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.473837 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9gxg\" (UniqueName: \"kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg\") pod \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.474233 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key\") pod \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\" (UID: \"c22ac692-8285-4e89-8c2b-28b2bc125fa3\") " Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.478971 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg" (OuterVolumeSpecName: "kube-api-access-c9gxg") pod "c22ac692-8285-4e89-8c2b-28b2bc125fa3" (UID: "c22ac692-8285-4e89-8c2b-28b2bc125fa3"). InnerVolumeSpecName "kube-api-access-c9gxg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.501522 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory" (OuterVolumeSpecName: "inventory") pod "c22ac692-8285-4e89-8c2b-28b2bc125fa3" (UID: "c22ac692-8285-4e89-8c2b-28b2bc125fa3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.513530 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c22ac692-8285-4e89-8c2b-28b2bc125fa3" (UID: "c22ac692-8285-4e89-8c2b-28b2bc125fa3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.578013 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.578056 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c22ac692-8285-4e89-8c2b-28b2bc125fa3-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.578069 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9gxg\" (UniqueName: \"kubernetes.io/projected/c22ac692-8285-4e89-8c2b-28b2bc125fa3-kube-api-access-c9gxg\") on node \"crc\" DevicePath \"\"" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.840057 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" event={"ID":"c22ac692-8285-4e89-8c2b-28b2bc125fa3","Type":"ContainerDied","Data":"da092f9c6c2448a55a0fdb37ce5c965f4a1514e5298cacdc6ba9e2a39011f5f2"} Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.840294 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da092f9c6c2448a55a0fdb37ce5c965f4a1514e5298cacdc6ba9e2a39011f5f2" Dec 05 19:49:53 crc kubenswrapper[4982]: I1205 19:49:53.840169 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-cf864" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.011861 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q"] Dec 05 19:49:54 crc kubenswrapper[4982]: E1205 19:49:54.012311 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c22ac692-8285-4e89-8c2b-28b2bc125fa3" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.012327 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c22ac692-8285-4e89-8c2b-28b2bc125fa3" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.012498 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c22ac692-8285-4e89-8c2b-28b2bc125fa3" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.013256 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.015662 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.017238 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.017503 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.017665 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.017817 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.018090 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.018257 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.019744 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.043068 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q"] Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087137 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087204 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087248 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087276 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087298 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087339 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cccbz\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087415 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087484 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087585 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087694 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087731 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: 
\"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087777 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087812 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.087869 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190158 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190216 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190243 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190279 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 
19:49:54.190322 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190346 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190369 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190449 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cccbz\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190470 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190495 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190534 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190577 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190608 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.190945 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.194977 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.195189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.195622 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.195899 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.196763 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.196981 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" 
(UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.197034 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.197713 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.197920 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.198180 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.198787 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.199349 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.201770 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.217567 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cccbz\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.332828 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:49:54 crc kubenswrapper[4982]: I1205 19:49:54.919884 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q"] Dec 05 19:49:55 crc kubenswrapper[4982]: I1205 19:49:55.866728 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" event={"ID":"ddbe0e8f-d183-4f24-a7cf-221b3221cb27","Type":"ContainerStarted","Data":"d3f13cf8ed6f5758b7471b3b6e4fecd3facac3be4fbdc368725a1e2393c6d72e"} Dec 05 19:49:55 crc kubenswrapper[4982]: I1205 19:49:55.868359 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" event={"ID":"ddbe0e8f-d183-4f24-a7cf-221b3221cb27","Type":"ContainerStarted","Data":"047f917a424edcbdaf1a28630268076a0f93e76ed6dbed26ffb7cde762ed7036"} Dec 05 19:50:36 crc kubenswrapper[4982]: I1205 19:50:36.261672 4982 generic.go:334] "Generic (PLEG): container finished" podID="ddbe0e8f-d183-4f24-a7cf-221b3221cb27" containerID="d3f13cf8ed6f5758b7471b3b6e4fecd3facac3be4fbdc368725a1e2393c6d72e" exitCode=0 Dec 05 19:50:36 crc kubenswrapper[4982]: I1205 19:50:36.262251 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" event={"ID":"ddbe0e8f-d183-4f24-a7cf-221b3221cb27","Type":"ContainerDied","Data":"d3f13cf8ed6f5758b7471b3b6e4fecd3facac3be4fbdc368725a1e2393c6d72e"} Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.744456 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.856725 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.856800 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cccbz\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.856855 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.856913 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.856972 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857042 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857072 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857105 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857135 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc 
kubenswrapper[4982]: I1205 19:50:37.857224 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857258 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857318 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857389 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.857428 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle\") pod \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\" (UID: \"ddbe0e8f-d183-4f24-a7cf-221b3221cb27\") " Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.864564 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.864591 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.864829 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz" (OuterVolumeSpecName: "kube-api-access-cccbz") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "kube-api-access-cccbz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.865166 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.865798 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.865860 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.866535 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.866585 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.866858 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.868528 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.870310 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.876283 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.892516 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.898697 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory" (OuterVolumeSpecName: "inventory") pod "ddbe0e8f-d183-4f24-a7cf-221b3221cb27" (UID: "ddbe0e8f-d183-4f24-a7cf-221b3221cb27"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960220 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960444 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960530 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960603 4982 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960707 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960787 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960883 4982 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.960964 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961042 4982 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961119 4982 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961223 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961297 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cccbz\" (UniqueName: 
\"kubernetes.io/projected/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-kube-api-access-cccbz\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961365 4982 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:37 crc kubenswrapper[4982]: I1205 19:50:37.961443 4982 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ddbe0e8f-d183-4f24-a7cf-221b3221cb27-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.279216 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" event={"ID":"ddbe0e8f-d183-4f24-a7cf-221b3221cb27","Type":"ContainerDied","Data":"047f917a424edcbdaf1a28630268076a0f93e76ed6dbed26ffb7cde762ed7036"} Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.279257 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="047f917a424edcbdaf1a28630268076a0f93e76ed6dbed26ffb7cde762ed7036" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.279300 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.412648 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj"] Dec 05 19:50:38 crc kubenswrapper[4982]: E1205 19:50:38.413471 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddbe0e8f-d183-4f24-a7cf-221b3221cb27" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.413506 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddbe0e8f-d183-4f24-a7cf-221b3221cb27" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.413753 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddbe0e8f-d183-4f24-a7cf-221b3221cb27" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.414727 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.417883 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.418440 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.419083 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.420079 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.421017 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.430800 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj"] Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.572203 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-297q5\" (UniqueName: \"kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.572284 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.572350 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.572405 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.572437 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.673911 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-297q5\" 
(UniqueName: \"kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.673977 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.674019 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.674067 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.674098 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.675164 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.678905 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.679602 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.687134 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.690967 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-297q5\" (UniqueName: \"kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-vgqgj\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:38 crc kubenswrapper[4982]: I1205 19:50:38.731943 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:50:39 crc kubenswrapper[4982]: I1205 19:50:39.300798 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj"] Dec 05 19:50:40 crc kubenswrapper[4982]: I1205 19:50:40.296882 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" event={"ID":"19ae7f76-4c93-41fe-9ac4-aead0ad360fe","Type":"ContainerStarted","Data":"8053918d8a1ae575888ae4386f42dfd91dfafc01d6b5b5f6dec8374bf51945f7"} Dec 05 19:50:40 crc kubenswrapper[4982]: I1205 19:50:40.297216 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" event={"ID":"19ae7f76-4c93-41fe-9ac4-aead0ad360fe","Type":"ContainerStarted","Data":"505351efdd21c122f72de31796ac92deb96f76ebd68abbf22d829b79a187e223"} Dec 05 19:50:40 crc kubenswrapper[4982]: I1205 19:50:40.317670 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" podStartSLOduration=1.910750859 podStartE2EDuration="2.317647592s" podCreationTimestamp="2025-12-05 19:50:38 +0000 UTC" firstStartedPulling="2025-12-05 19:50:39.30596637 +0000 UTC m=+2218.187852365" lastFinishedPulling="2025-12-05 19:50:39.712863093 +0000 UTC m=+2218.594749098" observedRunningTime="2025-12-05 19:50:40.317364775 +0000 UTC m=+2219.199250770" watchObservedRunningTime="2025-12-05 19:50:40.317647592 +0000 UTC m=+2219.199533607" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.484143 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.487488 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.496122 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.642259 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.642589 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.642803 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9lc7\" (UniqueName: \"kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.745287 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.745435 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.745510 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9lc7\" (UniqueName: \"kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.745832 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.745858 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.787956 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r9lc7\" (UniqueName: \"kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7\") pod \"community-operators-825vs\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:46 crc kubenswrapper[4982]: I1205 19:50:46.822964 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:47 crc kubenswrapper[4982]: I1205 19:50:47.449321 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:50:48 crc kubenswrapper[4982]: I1205 19:50:48.369471 4982 generic.go:334] "Generic (PLEG): container finished" podID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerID="8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d" exitCode=0 Dec 05 19:50:48 crc kubenswrapper[4982]: I1205 19:50:48.369550 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerDied","Data":"8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d"} Dec 05 19:50:48 crc kubenswrapper[4982]: I1205 19:50:48.371449 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerStarted","Data":"f2dc5699e762adf99bd4016d83007e38b52cc869b04749852fa685c878e19cd9"} Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.385439 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerStarted","Data":"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09"} Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.685176 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.687600 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.705085 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.825242 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.825740 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.825804 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8s6r\" (UniqueName: \"kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.927345 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.927455 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.927495 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8s6r\" (UniqueName: \"kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.927924 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.928070 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:49 crc kubenswrapper[4982]: I1205 19:50:49.953295 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d8s6r\" (UniqueName: \"kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r\") pod \"certified-operators-9msrp\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:50 crc kubenswrapper[4982]: I1205 19:50:50.007572 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:50:50 crc kubenswrapper[4982]: I1205 19:50:50.431633 4982 generic.go:334] "Generic (PLEG): container finished" podID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerID="cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09" exitCode=0 Dec 05 19:50:50 crc kubenswrapper[4982]: I1205 19:50:50.431927 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerDied","Data":"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09"} Dec 05 19:50:50 crc kubenswrapper[4982]: I1205 19:50:50.535185 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:50:50 crc kubenswrapper[4982]: I1205 19:50:50.916565 4982 scope.go:117] "RemoveContainer" containerID="b74558ece5d857994012c3cafb5f6f4067adab752c1c94f8817977d6bd7a6fb7" Dec 05 19:50:51 crc kubenswrapper[4982]: I1205 19:50:51.445457 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerStarted","Data":"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1"} Dec 05 19:50:51 crc kubenswrapper[4982]: I1205 19:50:51.447758 4982 generic.go:334] "Generic (PLEG): container finished" podID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerID="b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39" exitCode=0 Dec 05 19:50:51 crc kubenswrapper[4982]: I1205 19:50:51.447803 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerDied","Data":"b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39"} Dec 05 19:50:51 crc kubenswrapper[4982]: I1205 19:50:51.447830 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerStarted","Data":"c5c91829fdac8ed34188075b068e31677ef7a76edbd5839d3b69ad9668411031"} Dec 05 19:50:51 crc kubenswrapper[4982]: I1205 19:50:51.471766 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-825vs" podStartSLOduration=3.002809404 podStartE2EDuration="5.4717438s" podCreationTimestamp="2025-12-05 19:50:46 +0000 UTC" firstStartedPulling="2025-12-05 19:50:48.37138903 +0000 UTC m=+2227.253275035" lastFinishedPulling="2025-12-05 19:50:50.840323436 +0000 UTC m=+2229.722209431" observedRunningTime="2025-12-05 19:50:51.46306608 +0000 UTC m=+2230.344952075" watchObservedRunningTime="2025-12-05 19:50:51.4717438 +0000 UTC m=+2230.353629795" Dec 05 19:50:52 crc kubenswrapper[4982]: I1205 19:50:52.461438 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" 
event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerStarted","Data":"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed"} Dec 05 19:50:53 crc kubenswrapper[4982]: I1205 19:50:53.470941 4982 generic.go:334] "Generic (PLEG): container finished" podID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerID="bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed" exitCode=0 Dec 05 19:50:53 crc kubenswrapper[4982]: I1205 19:50:53.470986 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerDied","Data":"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed"} Dec 05 19:50:54 crc kubenswrapper[4982]: I1205 19:50:54.481034 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerStarted","Data":"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f"} Dec 05 19:50:54 crc kubenswrapper[4982]: I1205 19:50:54.499586 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9msrp" podStartSLOduration=3.086326478 podStartE2EDuration="5.499569872s" podCreationTimestamp="2025-12-05 19:50:49 +0000 UTC" firstStartedPulling="2025-12-05 19:50:51.449355592 +0000 UTC m=+2230.331241598" lastFinishedPulling="2025-12-05 19:50:53.862598987 +0000 UTC m=+2232.744484992" observedRunningTime="2025-12-05 19:50:54.49437194 +0000 UTC m=+2233.376257935" watchObservedRunningTime="2025-12-05 19:50:54.499569872 +0000 UTC m=+2233.381455867" Dec 05 19:50:56 crc kubenswrapper[4982]: I1205 19:50:56.823625 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:56 crc kubenswrapper[4982]: I1205 19:50:56.824199 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:56 crc kubenswrapper[4982]: I1205 19:50:56.879236 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:57 crc kubenswrapper[4982]: I1205 19:50:57.552294 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:50:58 crc kubenswrapper[4982]: I1205 19:50:58.670158 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:50:59 crc kubenswrapper[4982]: I1205 19:50:59.529300 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-825vs" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="registry-server" containerID="cri-o://cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1" gracePeriod=2 Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.008334 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.008718 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.064955 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.092535 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.193210 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities\") pod \"3ba9f103-c225-4a1f-9fff-278e8c987189\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.193523 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content\") pod \"3ba9f103-c225-4a1f-9fff-278e8c987189\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.194054 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities" (OuterVolumeSpecName: "utilities") pod "3ba9f103-c225-4a1f-9fff-278e8c987189" (UID: "3ba9f103-c225-4a1f-9fff-278e8c987189"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.194704 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9lc7\" (UniqueName: \"kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7\") pod \"3ba9f103-c225-4a1f-9fff-278e8c987189\" (UID: \"3ba9f103-c225-4a1f-9fff-278e8c987189\") " Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.195611 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.200662 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7" (OuterVolumeSpecName: "kube-api-access-r9lc7") pod "3ba9f103-c225-4a1f-9fff-278e8c987189" (UID: "3ba9f103-c225-4a1f-9fff-278e8c987189"). InnerVolumeSpecName "kube-api-access-r9lc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.242022 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3ba9f103-c225-4a1f-9fff-278e8c987189" (UID: "3ba9f103-c225-4a1f-9fff-278e8c987189"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.297479 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9lc7\" (UniqueName: \"kubernetes.io/projected/3ba9f103-c225-4a1f-9fff-278e8c987189-kube-api-access-r9lc7\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.297513 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3ba9f103-c225-4a1f-9fff-278e8c987189-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.568401 4982 generic.go:334] "Generic (PLEG): container finished" podID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerID="cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1" exitCode=0 Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.568464 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-825vs" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.568513 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerDied","Data":"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1"} Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.568542 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-825vs" event={"ID":"3ba9f103-c225-4a1f-9fff-278e8c987189","Type":"ContainerDied","Data":"f2dc5699e762adf99bd4016d83007e38b52cc869b04749852fa685c878e19cd9"} Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.568559 4982 scope.go:117] "RemoveContainer" containerID="cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.628211 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.658090 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-825vs"] Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.662961 4982 scope.go:117] "RemoveContainer" containerID="cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.706958 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.708735 4982 scope.go:117] "RemoveContainer" containerID="8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.755470 4982 scope.go:117] "RemoveContainer" containerID="cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1" Dec 05 19:51:00 crc kubenswrapper[4982]: E1205 19:51:00.758670 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1\": container with ID starting with cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1 not found: ID does not exist" containerID="cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.758729 4982 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1"} err="failed to get container status \"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1\": rpc error: code = NotFound desc = could not find container \"cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1\": container with ID starting with cec61e6a581f3fcb4020534192dfb8054359a9a48656b356f62237601fe746e1 not found: ID does not exist" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.758756 4982 scope.go:117] "RemoveContainer" containerID="cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09" Dec 05 19:51:00 crc kubenswrapper[4982]: E1205 19:51:00.760337 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09\": container with ID starting with cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09 not found: ID does not exist" containerID="cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.760443 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09"} err="failed to get container status \"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09\": rpc error: code = NotFound desc = could not find container \"cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09\": container with ID starting with cb45446e958898ef7e0ea94ec3cce2886354835880252f2e34795880f89bca09 not found: ID does not exist" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.760525 4982 scope.go:117] "RemoveContainer" containerID="8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d" Dec 05 19:51:00 crc kubenswrapper[4982]: E1205 19:51:00.760881 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d\": container with ID starting with 8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d not found: ID does not exist" containerID="8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d" Dec 05 19:51:00 crc kubenswrapper[4982]: I1205 19:51:00.760916 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d"} err="failed to get container status \"8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d\": rpc error: code = NotFound desc = could not find container \"8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d\": container with ID starting with 8e8dab646badaaf053962926d7d406fb0863bc0d394a393bda44fc19b5f3714d not found: ID does not exist" Dec 05 19:51:01 crc kubenswrapper[4982]: I1205 19:51:01.414224 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" path="/var/lib/kubelet/pods/3ba9f103-c225-4a1f-9fff-278e8c987189/volumes" Dec 05 19:51:02 crc kubenswrapper[4982]: I1205 19:51:02.471650 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:51:02 crc kubenswrapper[4982]: I1205 19:51:02.591061 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9msrp" 
podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="registry-server" containerID="cri-o://5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f" gracePeriod=2 Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.050807 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.175113 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content\") pod \"be393da1-8b0b-44db-a68d-5724e5a26ff4\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.175351 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8s6r\" (UniqueName: \"kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r\") pod \"be393da1-8b0b-44db-a68d-5724e5a26ff4\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.175501 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities\") pod \"be393da1-8b0b-44db-a68d-5724e5a26ff4\" (UID: \"be393da1-8b0b-44db-a68d-5724e5a26ff4\") " Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.176278 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities" (OuterVolumeSpecName: "utilities") pod "be393da1-8b0b-44db-a68d-5724e5a26ff4" (UID: "be393da1-8b0b-44db-a68d-5724e5a26ff4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.180363 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r" (OuterVolumeSpecName: "kube-api-access-d8s6r") pod "be393da1-8b0b-44db-a68d-5724e5a26ff4" (UID: "be393da1-8b0b-44db-a68d-5724e5a26ff4"). InnerVolumeSpecName "kube-api-access-d8s6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.227854 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be393da1-8b0b-44db-a68d-5724e5a26ff4" (UID: "be393da1-8b0b-44db-a68d-5724e5a26ff4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.277860 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8s6r\" (UniqueName: \"kubernetes.io/projected/be393da1-8b0b-44db-a68d-5724e5a26ff4-kube-api-access-d8s6r\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.277904 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.277917 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be393da1-8b0b-44db-a68d-5724e5a26ff4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.602516 4982 generic.go:334] "Generic (PLEG): container finished" podID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerID="5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f" exitCode=0 Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.602568 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerDied","Data":"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f"} Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.602591 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9msrp" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.602606 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9msrp" event={"ID":"be393da1-8b0b-44db-a68d-5724e5a26ff4","Type":"ContainerDied","Data":"c5c91829fdac8ed34188075b068e31677ef7a76edbd5839d3b69ad9668411031"} Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.602628 4982 scope.go:117] "RemoveContainer" containerID="5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.627184 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.635222 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9msrp"] Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.635470 4982 scope.go:117] "RemoveContainer" containerID="bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.664404 4982 scope.go:117] "RemoveContainer" containerID="b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.705396 4982 scope.go:117] "RemoveContainer" containerID="5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f" Dec 05 19:51:03 crc kubenswrapper[4982]: E1205 19:51:03.705749 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f\": container with ID starting with 5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f not found: ID does not exist" containerID="5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.705784 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f"} err="failed to get container status \"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f\": rpc error: code = NotFound desc = could not find container \"5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f\": container with ID starting with 5e9f81e21d6d70e7e065b2f66920374a45dc7c2ea0c20c8161f197512ef63f7f not found: ID does not exist" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.705804 4982 scope.go:117] "RemoveContainer" containerID="bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed" Dec 05 19:51:03 crc kubenswrapper[4982]: E1205 19:51:03.706155 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed\": container with ID starting with bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed not found: ID does not exist" containerID="bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.706174 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed"} err="failed to get container status \"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed\": rpc error: code = NotFound desc = could not find container \"bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed\": container with ID starting with bc547801122efd1985ffd9f75b8b92f8c528b9144397b14dd76497008fed44ed not found: ID does not exist" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.706187 4982 scope.go:117] "RemoveContainer" containerID="b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39" Dec 05 19:51:03 crc kubenswrapper[4982]: E1205 19:51:03.706595 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39\": container with ID starting with b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39 not found: ID does not exist" containerID="b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39" Dec 05 19:51:03 crc kubenswrapper[4982]: I1205 19:51:03.706636 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39"} err="failed to get container status \"b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39\": rpc error: code = NotFound desc = could not find container \"b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39\": container with ID starting with b4d442e428a6b114c1a4036664688af61299af73e2ec268cb38327c62ba7fc39 not found: ID does not exist" Dec 05 19:51:05 crc kubenswrapper[4982]: I1205 19:51:05.403205 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" path="/var/lib/kubelet/pods/be393da1-8b0b-44db-a68d-5724e5a26ff4/volumes" Dec 05 19:51:12 crc kubenswrapper[4982]: I1205 19:51:12.556715 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:51:12 crc kubenswrapper[4982]: I1205 19:51:12.557275 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:51:42 crc kubenswrapper[4982]: I1205 19:51:42.556665 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:51:42 crc kubenswrapper[4982]: I1205 19:51:42.557218 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:51:47 crc kubenswrapper[4982]: I1205 19:51:47.054413 4982 generic.go:334] "Generic (PLEG): container finished" podID="19ae7f76-4c93-41fe-9ac4-aead0ad360fe" containerID="8053918d8a1ae575888ae4386f42dfd91dfafc01d6b5b5f6dec8374bf51945f7" exitCode=0 Dec 05 19:51:47 crc kubenswrapper[4982]: I1205 19:51:47.054487 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" event={"ID":"19ae7f76-4c93-41fe-9ac4-aead0ad360fe","Type":"ContainerDied","Data":"8053918d8a1ae575888ae4386f42dfd91dfafc01d6b5b5f6dec8374bf51945f7"} Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.598192 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.735762 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory\") pod \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.735842 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key\") pod \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.735885 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-297q5\" (UniqueName: \"kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5\") pod \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.735972 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0\") pod \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.736729 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle\") pod \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\" (UID: \"19ae7f76-4c93-41fe-9ac4-aead0ad360fe\") " Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.741399 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5" (OuterVolumeSpecName: "kube-api-access-297q5") pod "19ae7f76-4c93-41fe-9ac4-aead0ad360fe" (UID: "19ae7f76-4c93-41fe-9ac4-aead0ad360fe"). InnerVolumeSpecName "kube-api-access-297q5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.741743 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "19ae7f76-4c93-41fe-9ac4-aead0ad360fe" (UID: "19ae7f76-4c93-41fe-9ac4-aead0ad360fe"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.772478 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "19ae7f76-4c93-41fe-9ac4-aead0ad360fe" (UID: "19ae7f76-4c93-41fe-9ac4-aead0ad360fe"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.778684 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "19ae7f76-4c93-41fe-9ac4-aead0ad360fe" (UID: "19ae7f76-4c93-41fe-9ac4-aead0ad360fe"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.798821 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory" (OuterVolumeSpecName: "inventory") pod "19ae7f76-4c93-41fe-9ac4-aead0ad360fe" (UID: "19ae7f76-4c93-41fe-9ac4-aead0ad360fe"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.839456 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.839485 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.839506 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-297q5\" (UniqueName: \"kubernetes.io/projected/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-kube-api-access-297q5\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.839518 4982 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:48 crc kubenswrapper[4982]: I1205 19:51:48.839526 4982 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19ae7f76-4c93-41fe-9ac4-aead0ad360fe-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.079184 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" event={"ID":"19ae7f76-4c93-41fe-9ac4-aead0ad360fe","Type":"ContainerDied","Data":"505351efdd21c122f72de31796ac92deb96f76ebd68abbf22d829b79a187e223"} Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.079245 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="505351efdd21c122f72de31796ac92deb96f76ebd68abbf22d829b79a187e223" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.079292 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-vgqgj" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.193957 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww"] Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194344 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="registry-server" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194356 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="registry-server" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194371 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="extract-utilities" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194378 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="extract-utilities" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194389 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="extract-content" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194397 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="extract-content" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194419 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="extract-content" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194426 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="extract-content" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194436 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="registry-server" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194442 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="registry-server" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194452 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="extract-utilities" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194457 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="extract-utilities" Dec 05 19:51:49 crc kubenswrapper[4982]: E1205 19:51:49.194474 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19ae7f76-4c93-41fe-9ac4-aead0ad360fe" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194481 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="19ae7f76-4c93-41fe-9ac4-aead0ad360fe" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194650 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ba9f103-c225-4a1f-9fff-278e8c987189" containerName="registry-server" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.194658 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="be393da1-8b0b-44db-a68d-5724e5a26ff4" containerName="registry-server" Dec 05 19:51:49 crc 
kubenswrapper[4982]: I1205 19:51:49.194678 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="19ae7f76-4c93-41fe-9ac4-aead0ad360fe" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.195341 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.197583 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.197742 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.198161 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.198478 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.198628 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.199127 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.226893 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww"] Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349665 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349726 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349769 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349885 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349920 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpdl5\" (UniqueName: \"kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.349987 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.452169 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpdl5\" (UniqueName: \"kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.452437 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.452556 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.452806 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.452947 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.453430 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.456954 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.457130 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.457294 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.457928 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.458129 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.475749 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpdl5\" (UniqueName: \"kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:49 crc kubenswrapper[4982]: I1205 19:51:49.584845 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:51:50 crc kubenswrapper[4982]: I1205 19:51:50.107190 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww"] Dec 05 19:51:50 crc kubenswrapper[4982]: I1205 19:51:50.108891 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:51:51 crc kubenswrapper[4982]: I1205 19:51:51.102667 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" event={"ID":"6ff78dfa-4b95-4e32-b569-08d967824332","Type":"ContainerStarted","Data":"c5648c4470336cd604bebf8ce574b14202b32701410654f8c5e352809afad97b"} Dec 05 19:51:51 crc kubenswrapper[4982]: I1205 19:51:51.103436 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" event={"ID":"6ff78dfa-4b95-4e32-b569-08d967824332","Type":"ContainerStarted","Data":"cb68d9ceb04d7ec08cf0d178374005fce59cc7b5cd2951e7502225be43af882f"} Dec 05 19:51:51 crc kubenswrapper[4982]: I1205 19:51:51.126581 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" podStartSLOduration=1.651460105 podStartE2EDuration="2.126563377s" podCreationTimestamp="2025-12-05 19:51:49 +0000 UTC" firstStartedPulling="2025-12-05 19:51:50.108668037 +0000 UTC m=+2288.990554032" lastFinishedPulling="2025-12-05 19:51:50.583771309 +0000 UTC m=+2289.465657304" observedRunningTime="2025-12-05 19:51:51.117696942 +0000 UTC m=+2289.999582937" watchObservedRunningTime="2025-12-05 19:51:51.126563377 +0000 UTC m=+2290.008449372" Dec 05 19:52:12 crc kubenswrapper[4982]: I1205 19:52:12.557206 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 19:52:12 crc kubenswrapper[4982]: I1205 19:52:12.557668 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 19:52:12 crc kubenswrapper[4982]: I1205 19:52:12.557708 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 19:52:12 crc kubenswrapper[4982]: I1205 19:52:12.558352 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 19:52:12 crc kubenswrapper[4982]: I1205 19:52:12.558444 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" 
containerID="cri-o://51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" gracePeriod=600 Dec 05 19:52:12 crc kubenswrapper[4982]: E1205 19:52:12.680247 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:52:13 crc kubenswrapper[4982]: I1205 19:52:13.297086 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" exitCode=0 Dec 05 19:52:13 crc kubenswrapper[4982]: I1205 19:52:13.297132 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf"} Dec 05 19:52:13 crc kubenswrapper[4982]: I1205 19:52:13.297189 4982 scope.go:117] "RemoveContainer" containerID="f01c1d5189425b39486d48c52bcbcc79562af7955203c5d8cee9a8137a433a91" Dec 05 19:52:13 crc kubenswrapper[4982]: I1205 19:52:13.298011 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:52:13 crc kubenswrapper[4982]: E1205 19:52:13.298456 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:52:26 crc kubenswrapper[4982]: I1205 19:52:26.391356 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:52:26 crc kubenswrapper[4982]: E1205 19:52:26.392217 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:52:39 crc kubenswrapper[4982]: I1205 19:52:39.390573 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:52:39 crc kubenswrapper[4982]: E1205 19:52:39.391423 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:52:42 crc kubenswrapper[4982]: I1205 19:52:42.610673 4982 generic.go:334] "Generic (PLEG): container finished" podID="6ff78dfa-4b95-4e32-b569-08d967824332" 
containerID="c5648c4470336cd604bebf8ce574b14202b32701410654f8c5e352809afad97b" exitCode=0 Dec 05 19:52:42 crc kubenswrapper[4982]: I1205 19:52:42.610742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" event={"ID":"6ff78dfa-4b95-4e32-b569-08d967824332","Type":"ContainerDied","Data":"c5648c4470336cd604bebf8ce574b14202b32701410654f8c5e352809afad97b"} Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.117371 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.206764 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.206899 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.206929 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.206994 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.207084 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.207102 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpdl5\" (UniqueName: \"kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5\") pod \"6ff78dfa-4b95-4e32-b569-08d967824332\" (UID: \"6ff78dfa-4b95-4e32-b569-08d967824332\") " Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.212429 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5" (OuterVolumeSpecName: "kube-api-access-dpdl5") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "kube-api-access-dpdl5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.212886 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.236014 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.236089 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.238302 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.257053 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory" (OuterVolumeSpecName: "inventory") pod "6ff78dfa-4b95-4e32-b569-08d967824332" (UID: "6ff78dfa-4b95-4e32-b569-08d967824332"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309840 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309884 4982 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309904 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpdl5\" (UniqueName: \"kubernetes.io/projected/6ff78dfa-4b95-4e32-b569-08d967824332-kube-api-access-dpdl5\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309918 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309930 4982 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.309944 4982 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ff78dfa-4b95-4e32-b569-08d967824332-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.633245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" event={"ID":"6ff78dfa-4b95-4e32-b569-08d967824332","Type":"ContainerDied","Data":"cb68d9ceb04d7ec08cf0d178374005fce59cc7b5cd2951e7502225be43af882f"} Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.633290 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb68d9ceb04d7ec08cf0d178374005fce59cc7b5cd2951e7502225be43af882f" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.633341 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.732247 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct"] Dec 05 19:52:44 crc kubenswrapper[4982]: E1205 19:52:44.732991 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ff78dfa-4b95-4e32-b569-08d967824332" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.733018 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ff78dfa-4b95-4e32-b569-08d967824332" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.733316 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ff78dfa-4b95-4e32-b569-08d967824332" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.734244 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.736861 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.737063 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.737121 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.737437 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.740305 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.755437 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct"] Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.819904 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.819988 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.820040 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.820224 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.820324 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d95gd\" (UniqueName: \"kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.922532 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.922896 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.923007 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.923121 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d95gd\" (UniqueName: \"kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.923733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.927399 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.927515 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.927741 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.927816 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:44 crc kubenswrapper[4982]: I1205 19:52:44.940714 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d95gd\" (UniqueName: \"kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-lbxct\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:45 crc kubenswrapper[4982]: I1205 19:52:45.056224 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:52:45 crc kubenswrapper[4982]: I1205 19:52:45.581356 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct"] Dec 05 19:52:45 crc kubenswrapper[4982]: I1205 19:52:45.645261 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" event={"ID":"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35","Type":"ContainerStarted","Data":"74b85a932c4f22333228376981af13b4db19f7138638c321582f0700a586a463"} Dec 05 19:52:46 crc kubenswrapper[4982]: I1205 19:52:46.657674 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" event={"ID":"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35","Type":"ContainerStarted","Data":"9964490eb31fdf0e0773ec847f3eba350ccc5ac63af269017ebf9037970862ae"} Dec 05 19:52:46 crc kubenswrapper[4982]: I1205 19:52:46.686811 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" podStartSLOduration=2.218232847 podStartE2EDuration="2.686794324s" podCreationTimestamp="2025-12-05 19:52:44 +0000 UTC" firstStartedPulling="2025-12-05 19:52:45.589096991 +0000 UTC m=+2344.470982996" lastFinishedPulling="2025-12-05 19:52:46.057658478 +0000 UTC m=+2344.939544473" observedRunningTime="2025-12-05 19:52:46.681054868 +0000 UTC m=+2345.562940873" watchObservedRunningTime="2025-12-05 19:52:46.686794324 +0000 UTC m=+2345.568680319" Dec 05 19:52:52 crc kubenswrapper[4982]: I1205 19:52:52.390459 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:52:52 crc kubenswrapper[4982]: E1205 19:52:52.391107 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:53:03 crc kubenswrapper[4982]: I1205 19:53:03.391333 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:53:03 crc kubenswrapper[4982]: E1205 19:53:03.392165 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:53:18 crc kubenswrapper[4982]: I1205 19:53:18.390547 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:53:18 crc kubenswrapper[4982]: E1205 19:53:18.391296 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:53:31 crc kubenswrapper[4982]: I1205 19:53:31.405864 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:53:31 crc kubenswrapper[4982]: E1205 19:53:31.407378 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:53:42 crc kubenswrapper[4982]: I1205 19:53:42.390579 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:53:42 crc kubenswrapper[4982]: E1205 19:53:42.391532 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:53:53 crc kubenswrapper[4982]: I1205 19:53:53.391096 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:53:53 crc kubenswrapper[4982]: E1205 19:53:53.392076 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:54:05 crc kubenswrapper[4982]: I1205 19:54:05.391466 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:54:05 crc kubenswrapper[4982]: E1205 19:54:05.392451 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:54:18 crc kubenswrapper[4982]: I1205 19:54:18.390715 4982 
scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:54:18 crc kubenswrapper[4982]: E1205 19:54:18.391904 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.638877 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.641449 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.660703 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.811197 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.811358 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tljzm\" (UniqueName: \"kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.811445 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.913352 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.913499 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.913574 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tljzm\" (UniqueName: \"kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " 
pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.913960 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.914105 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.944242 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tljzm\" (UniqueName: \"kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm\") pod \"redhat-marketplace-zfhq2\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:26 crc kubenswrapper[4982]: I1205 19:54:26.969115 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:27 crc kubenswrapper[4982]: I1205 19:54:27.484223 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:27 crc kubenswrapper[4982]: I1205 19:54:27.713947 4982 generic.go:334] "Generic (PLEG): container finished" podID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerID="62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e" exitCode=0 Dec 05 19:54:27 crc kubenswrapper[4982]: I1205 19:54:27.714054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerDied","Data":"62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e"} Dec 05 19:54:27 crc kubenswrapper[4982]: I1205 19:54:27.714290 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerStarted","Data":"3986398f21ca29b12273942d61464e6f8342946940e7308dd759d6caa7f63395"} Dec 05 19:54:28 crc kubenswrapper[4982]: I1205 19:54:28.729340 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerStarted","Data":"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a"} Dec 05 19:54:29 crc kubenswrapper[4982]: I1205 19:54:29.391110 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:54:29 crc kubenswrapper[4982]: E1205 19:54:29.391436 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:54:29 crc kubenswrapper[4982]: I1205 19:54:29.740395 4982 
generic.go:334] "Generic (PLEG): container finished" podID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerID="5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a" exitCode=0 Dec 05 19:54:29 crc kubenswrapper[4982]: I1205 19:54:29.740454 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerDied","Data":"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a"} Dec 05 19:54:30 crc kubenswrapper[4982]: I1205 19:54:30.751679 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerStarted","Data":"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93"} Dec 05 19:54:30 crc kubenswrapper[4982]: I1205 19:54:30.782134 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zfhq2" podStartSLOduration=2.303668186 podStartE2EDuration="4.782109394s" podCreationTimestamp="2025-12-05 19:54:26 +0000 UTC" firstStartedPulling="2025-12-05 19:54:27.715770393 +0000 UTC m=+2446.597656388" lastFinishedPulling="2025-12-05 19:54:30.194211591 +0000 UTC m=+2449.076097596" observedRunningTime="2025-12-05 19:54:30.766819607 +0000 UTC m=+2449.648705622" watchObservedRunningTime="2025-12-05 19:54:30.782109394 +0000 UTC m=+2449.663995419" Dec 05 19:54:36 crc kubenswrapper[4982]: I1205 19:54:36.969550 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:36 crc kubenswrapper[4982]: I1205 19:54:36.970979 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:37 crc kubenswrapper[4982]: I1205 19:54:37.037526 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:37 crc kubenswrapper[4982]: I1205 19:54:37.891666 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:37 crc kubenswrapper[4982]: I1205 19:54:37.951386 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:39 crc kubenswrapper[4982]: I1205 19:54:39.847962 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zfhq2" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="registry-server" containerID="cri-o://cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93" gracePeriod=2 Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.420859 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.585597 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tljzm\" (UniqueName: \"kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm\") pod \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.585661 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities\") pod \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.585692 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content\") pod \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\" (UID: \"4a9c704a-1018-4968-a4be-8c0a1b80d3d0\") " Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.587640 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities" (OuterVolumeSpecName: "utilities") pod "4a9c704a-1018-4968-a4be-8c0a1b80d3d0" (UID: "4a9c704a-1018-4968-a4be-8c0a1b80d3d0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.594521 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm" (OuterVolumeSpecName: "kube-api-access-tljzm") pod "4a9c704a-1018-4968-a4be-8c0a1b80d3d0" (UID: "4a9c704a-1018-4968-a4be-8c0a1b80d3d0"). InnerVolumeSpecName "kube-api-access-tljzm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.604740 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4a9c704a-1018-4968-a4be-8c0a1b80d3d0" (UID: "4a9c704a-1018-4968-a4be-8c0a1b80d3d0"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.688748 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tljzm\" (UniqueName: \"kubernetes.io/projected/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-kube-api-access-tljzm\") on node \"crc\" DevicePath \"\"" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.688980 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.689042 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4a9c704a-1018-4968-a4be-8c0a1b80d3d0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.858343 4982 generic.go:334] "Generic (PLEG): container finished" podID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerID="cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93" exitCode=0 Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.858420 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zfhq2" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.859546 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerDied","Data":"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93"} Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.859682 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zfhq2" event={"ID":"4a9c704a-1018-4968-a4be-8c0a1b80d3d0","Type":"ContainerDied","Data":"3986398f21ca29b12273942d61464e6f8342946940e7308dd759d6caa7f63395"} Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.859776 4982 scope.go:117] "RemoveContainer" containerID="cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.885614 4982 scope.go:117] "RemoveContainer" containerID="5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.893558 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.905188 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zfhq2"] Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.920754 4982 scope.go:117] "RemoveContainer" containerID="62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.954802 4982 scope.go:117] "RemoveContainer" containerID="cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93" Dec 05 19:54:40 crc kubenswrapper[4982]: E1205 19:54:40.955254 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93\": container with ID starting with cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93 not found: ID does not exist" containerID="cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.955291 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93"} err="failed to get container status \"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93\": rpc error: code = NotFound desc = could not find container \"cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93\": container with ID starting with cebf859ebe766167a97ec0273d22088b1a382fea7cff95002a72a31fda531a93 not found: ID does not exist" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.955318 4982 scope.go:117] "RemoveContainer" containerID="5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a" Dec 05 19:54:40 crc kubenswrapper[4982]: E1205 19:54:40.955672 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a\": container with ID starting with 5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a not found: ID does not exist" containerID="5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.955698 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a"} err="failed to get container status \"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a\": rpc error: code = NotFound desc = could not find container \"5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a\": container with ID starting with 5eebda218382dfd67cfd1ea5599e8cd4a09351fe08bbbb3f676adc9ee05d0f1a not found: ID does not exist" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.955712 4982 scope.go:117] "RemoveContainer" containerID="62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e" Dec 05 19:54:40 crc kubenswrapper[4982]: E1205 19:54:40.956104 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e\": container with ID starting with 62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e not found: ID does not exist" containerID="62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e" Dec 05 19:54:40 crc kubenswrapper[4982]: I1205 19:54:40.956248 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e"} err="failed to get container status \"62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e\": rpc error: code = NotFound desc = could not find container \"62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e\": container with ID starting with 62d5a6adab5b7847ff07b6be9edc6ef4ec6a002183c6e28ba36d713344eaa46e not found: ID does not exist" Dec 05 19:54:41 crc kubenswrapper[4982]: I1205 19:54:41.399547 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:54:41 crc kubenswrapper[4982]: E1205 19:54:41.400066 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:54:41 crc kubenswrapper[4982]: I1205 19:54:41.406012 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" path="/var/lib/kubelet/pods/4a9c704a-1018-4968-a4be-8c0a1b80d3d0/volumes" Dec 05 19:54:52 crc kubenswrapper[4982]: I1205 19:54:52.391037 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:54:52 crc kubenswrapper[4982]: E1205 19:54:52.393853 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:55:05 crc kubenswrapper[4982]: I1205 19:55:05.392006 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:55:05 crc kubenswrapper[4982]: E1205 19:55:05.393099 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:55:16 crc kubenswrapper[4982]: I1205 19:55:16.390117 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:55:16 crc kubenswrapper[4982]: E1205 19:55:16.390924 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:55:31 crc kubenswrapper[4982]: I1205 19:55:31.396764 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:55:31 crc kubenswrapper[4982]: E1205 19:55:31.398909 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:55:45 crc kubenswrapper[4982]: I1205 19:55:45.391114 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:55:45 crc kubenswrapper[4982]: E1205 19:55:45.392175 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 
5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:56:00 crc kubenswrapper[4982]: I1205 19:56:00.390699 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:56:00 crc kubenswrapper[4982]: E1205 19:56:00.392872 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:56:12 crc kubenswrapper[4982]: I1205 19:56:12.390358 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:56:12 crc kubenswrapper[4982]: E1205 19:56:12.391164 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:56:25 crc kubenswrapper[4982]: I1205 19:56:25.389884 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:56:25 crc kubenswrapper[4982]: E1205 19:56:25.390700 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:56:38 crc kubenswrapper[4982]: I1205 19:56:38.391071 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:56:38 crc kubenswrapper[4982]: E1205 19:56:38.392198 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:56:51 crc kubenswrapper[4982]: I1205 19:56:51.397833 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:56:51 crc kubenswrapper[4982]: E1205 19:56:51.398597 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:57:03 crc kubenswrapper[4982]: I1205 19:57:03.390231 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:57:03 crc kubenswrapper[4982]: E1205 19:57:03.390867 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 19:57:12 crc kubenswrapper[4982]: I1205 19:57:12.402177 4982 generic.go:334] "Generic (PLEG): container finished" podID="8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" containerID="9964490eb31fdf0e0773ec847f3eba350ccc5ac63af269017ebf9037970862ae" exitCode=0 Dec 05 19:57:12 crc kubenswrapper[4982]: I1205 19:57:12.402307 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" event={"ID":"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35","Type":"ContainerDied","Data":"9964490eb31fdf0e0773ec847f3eba350ccc5ac63af269017ebf9037970862ae"} Dec 05 19:57:13 crc kubenswrapper[4982]: I1205 19:57:13.917784 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.032988 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0\") pod \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.033030 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle\") pod \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.033095 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory\") pod \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.033169 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key\") pod \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.033235 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d95gd\" (UniqueName: \"kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd\") pod \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\" (UID: \"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35\") " Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.038304 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" (UID: "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.048473 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd" (OuterVolumeSpecName: "kube-api-access-d95gd") pod "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" (UID: "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35"). InnerVolumeSpecName "kube-api-access-d95gd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.063923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory" (OuterVolumeSpecName: "inventory") pod "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" (UID: "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.067897 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" (UID: "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.068311 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" (UID: "8f1fe72c-1893-4aa4-9fc9-5ab862de7c35"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.135513 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d95gd\" (UniqueName: \"kubernetes.io/projected/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-kube-api-access-d95gd\") on node \"crc\" DevicePath \"\"" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.135546 4982 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.135556 4982 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.135567 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.135575 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8f1fe72c-1893-4aa4-9fc9-5ab862de7c35-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.390309 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.425040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" event={"ID":"8f1fe72c-1893-4aa4-9fc9-5ab862de7c35","Type":"ContainerDied","Data":"74b85a932c4f22333228376981af13b4db19f7138638c321582f0700a586a463"} Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.425087 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74b85a932c4f22333228376981af13b4db19f7138638c321582f0700a586a463" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.425099 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-lbxct" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.517020 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7"] Dec 05 19:57:14 crc kubenswrapper[4982]: E1205 19:57:14.518020 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="extract-content" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.518119 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="extract-content" Dec 05 19:57:14 crc kubenswrapper[4982]: E1205 19:57:14.518274 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="registry-server" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.518382 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="registry-server" Dec 05 19:57:14 crc kubenswrapper[4982]: E1205 19:57:14.518506 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.518596 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 19:57:14 crc kubenswrapper[4982]: E1205 19:57:14.518681 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="extract-utilities" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.518743 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="extract-utilities" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.519060 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f1fe72c-1893-4aa4-9fc9-5ab862de7c35" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.519206 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a9c704a-1018-4968-a4be-8c0a1b80d3d0" containerName="registry-server" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.520807 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.524089 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.525019 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.525519 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.526042 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.526213 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.526551 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.528588 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.536376 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7"] Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.647699 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.647783 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.647973 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vxw5m\" (UniqueName: \"kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648012 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648042 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648121 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648504 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648600 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.648670 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750678 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750708 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750759 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vxw5m\" 
(UniqueName: \"kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750777 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750797 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750823 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750909 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.750938 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.752383 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.756379 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.756598 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.756888 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.757371 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.764659 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.765067 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.766348 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.767438 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vxw5m\" (UniqueName: \"kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m\") pod \"nova-edpm-deployment-openstack-edpm-ipam-qxxl7\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:14 crc kubenswrapper[4982]: I1205 19:57:14.851285 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" Dec 05 19:57:15 crc kubenswrapper[4982]: I1205 19:57:15.438799 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4"} Dec 05 19:57:15 crc kubenswrapper[4982]: I1205 19:57:15.495054 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 19:57:15 crc kubenswrapper[4982]: I1205 19:57:15.507939 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7"] Dec 05 19:57:16 crc kubenswrapper[4982]: I1205 19:57:16.449500 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" event={"ID":"592205cb-46f4-4bc6-9329-a90e5e63400e","Type":"ContainerStarted","Data":"79c9f013ceb70073f939ff4f53b8038f5f10c1531bf69b4a28271a14c3faf74e"} Dec 05 19:57:16 crc kubenswrapper[4982]: I1205 19:57:16.450042 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" event={"ID":"592205cb-46f4-4bc6-9329-a90e5e63400e","Type":"ContainerStarted","Data":"9d6906ff5107edb990a885ba69d8b19dabd23803084ca362d4a77634c54c4d4b"} Dec 05 19:57:16 crc kubenswrapper[4982]: I1205 19:57:16.467614 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" podStartSLOduration=2.004593733 podStartE2EDuration="2.467599005s" podCreationTimestamp="2025-12-05 19:57:14 +0000 UTC" firstStartedPulling="2025-12-05 19:57:15.494745935 +0000 UTC m=+2614.376631940" lastFinishedPulling="2025-12-05 19:57:15.957751217 +0000 UTC m=+2614.839637212" observedRunningTime="2025-12-05 19:57:16.466421085 +0000 UTC m=+2615.348307080" watchObservedRunningTime="2025-12-05 19:57:16.467599005 +0000 UTC m=+2615.349485000" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.448775 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"] Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.451973 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.462552 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"] Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.540435 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwzr6\" (UniqueName: \"kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.540884 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.541454 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.643963 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.644319 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.644478 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwzr6\" (UniqueName: \"kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.644526 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.644809 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.663880 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-dwzr6\" (UniqueName: \"kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6\") pod \"redhat-operators-9n8q2\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") " pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:50 crc kubenswrapper[4982]: I1205 19:58:50.781298 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9n8q2" Dec 05 19:58:51 crc kubenswrapper[4982]: I1205 19:58:51.275518 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"] Dec 05 19:58:51 crc kubenswrapper[4982]: I1205 19:58:51.642570 4982 generic.go:334] "Generic (PLEG): container finished" podID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerID="b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f" exitCode=0 Dec 05 19:58:51 crc kubenswrapper[4982]: I1205 19:58:51.642611 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerDied","Data":"b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f"} Dec 05 19:58:51 crc kubenswrapper[4982]: I1205 19:58:51.642863 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerStarted","Data":"f79b489412192bf476dc537538c6a9cbc63695ee657b04fb226cf9baad574a97"} Dec 05 19:58:52 crc kubenswrapper[4982]: I1205 19:58:52.653392 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerStarted","Data":"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"} Dec 05 19:58:54 crc kubenswrapper[4982]: I1205 19:58:54.675317 4982 generic.go:334] "Generic (PLEG): container finished" podID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerID="9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c" exitCode=0 Dec 05 19:58:54 crc kubenswrapper[4982]: I1205 19:58:54.675406 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerDied","Data":"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"} Dec 05 19:58:55 crc kubenswrapper[4982]: I1205 19:58:55.696815 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerStarted","Data":"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"} Dec 05 19:58:55 crc kubenswrapper[4982]: I1205 19:58:55.716354 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9n8q2" podStartSLOduration=2.185733587 podStartE2EDuration="5.716318291s" podCreationTimestamp="2025-12-05 19:58:50 +0000 UTC" firstStartedPulling="2025-12-05 19:58:51.644598877 +0000 UTC m=+2710.526484872" lastFinishedPulling="2025-12-05 19:58:55.175183581 +0000 UTC m=+2714.057069576" observedRunningTime="2025-12-05 19:58:55.712883384 +0000 UTC m=+2714.594769379" watchObservedRunningTime="2025-12-05 19:58:55.716318291 +0000 UTC m=+2714.598204286" Dec 05 19:59:00 crc kubenswrapper[4982]: I1205 19:59:00.782606 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9n8q2" 
Dec 05 19:59:00 crc kubenswrapper[4982]: I1205 19:59:00.797044 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9n8q2"
Dec 05 19:59:00 crc kubenswrapper[4982]: I1205 19:59:00.869343 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9n8q2"
Dec 05 19:59:01 crc kubenswrapper[4982]: I1205 19:59:01.816711 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9n8q2"
Dec 05 19:59:01 crc kubenswrapper[4982]: I1205 19:59:01.881763 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"]
Dec 05 19:59:03 crc kubenswrapper[4982]: I1205 19:59:03.775604 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9n8q2" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="registry-server" containerID="cri-o://b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66" gracePeriod=2
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.327924 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9n8q2"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.473122 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content\") pod \"421ab850-bc97-48cd-8a76-49f3ecbcd721\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") "
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.473261 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwzr6\" (UniqueName: \"kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6\") pod \"421ab850-bc97-48cd-8a76-49f3ecbcd721\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") "
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.473507 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities\") pod \"421ab850-bc97-48cd-8a76-49f3ecbcd721\" (UID: \"421ab850-bc97-48cd-8a76-49f3ecbcd721\") "
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.474637 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities" (OuterVolumeSpecName: "utilities") pod "421ab850-bc97-48cd-8a76-49f3ecbcd721" (UID: "421ab850-bc97-48cd-8a76-49f3ecbcd721"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.484535 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6" (OuterVolumeSpecName: "kube-api-access-dwzr6") pod "421ab850-bc97-48cd-8a76-49f3ecbcd721" (UID: "421ab850-bc97-48cd-8a76-49f3ecbcd721"). InnerVolumeSpecName "kube-api-access-dwzr6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.576530 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwzr6\" (UniqueName: \"kubernetes.io/projected/421ab850-bc97-48cd-8a76-49f3ecbcd721-kube-api-access-dwzr6\") on node \"crc\" DevicePath \"\""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.576561 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.613279 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "421ab850-bc97-48cd-8a76-49f3ecbcd721" (UID: "421ab850-bc97-48cd-8a76-49f3ecbcd721"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.678655 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/421ab850-bc97-48cd-8a76-49f3ecbcd721-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.787441 4982 generic.go:334] "Generic (PLEG): container finished" podID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerID="b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66" exitCode=0
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.787489 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerDied","Data":"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"}
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.787527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9n8q2" event={"ID":"421ab850-bc97-48cd-8a76-49f3ecbcd721","Type":"ContainerDied","Data":"f79b489412192bf476dc537538c6a9cbc63695ee657b04fb226cf9baad574a97"}
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.787551 4982 scope.go:117] "RemoveContainer" containerID="b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.787610 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9n8q2"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.814090 4982 scope.go:117] "RemoveContainer" containerID="9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.832478 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"]
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.842644 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9n8q2"]
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.857908 4982 scope.go:117] "RemoveContainer" containerID="b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.925468 4982 scope.go:117] "RemoveContainer" containerID="b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"
Dec 05 19:59:04 crc kubenswrapper[4982]: E1205 19:59:04.926130 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66\": container with ID starting with b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66 not found: ID does not exist" containerID="b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.926184 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66"} err="failed to get container status \"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66\": rpc error: code = NotFound desc = could not find container \"b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66\": container with ID starting with b3007d85e01334aff477ed568621ff1cd2544dc8eb6ef28e7e81630d293fbc66 not found: ID does not exist"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.926206 4982 scope.go:117] "RemoveContainer" containerID="9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"
Dec 05 19:59:04 crc kubenswrapper[4982]: E1205 19:59:04.926445 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c\": container with ID starting with 9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c not found: ID does not exist" containerID="9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.926477 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c"} err="failed to get container status \"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c\": rpc error: code = NotFound desc = could not find container \"9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c\": container with ID starting with 9b90d217d0aa5f87becbd1110f2decc1ee11479dc6b76c1ba31b03753b563a6c not found: ID does not exist"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.926495 4982 scope.go:117] "RemoveContainer" containerID="b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f"
Dec 05 19:59:04 crc kubenswrapper[4982]: E1205 19:59:04.926688 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f\": container with ID starting with b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f not found: ID does not exist" containerID="b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f"
Dec 05 19:59:04 crc kubenswrapper[4982]: I1205 19:59:04.926707 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f"} err="failed to get container status \"b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f\": rpc error: code = NotFound desc = could not find container \"b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f\": container with ID starting with b6d6285d9e8b4044f35f72cd72ced36dc22f29949aaf5d3f5eef034d2b7f913f not found: ID does not exist"
Dec 05 19:59:05 crc kubenswrapper[4982]: I1205 19:59:05.402275 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" path="/var/lib/kubelet/pods/421ab850-bc97-48cd-8a76-49f3ecbcd721/volumes"
Dec 05 19:59:42 crc kubenswrapper[4982]: I1205 19:59:42.557602 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 19:59:42 crc kubenswrapper[4982]: I1205 19:59:42.558137 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.163797 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"]
Dec 05 20:00:00 crc kubenswrapper[4982]: E1205 20:00:00.164766 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="registry-server"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.164783 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="registry-server"
Dec 05 20:00:00 crc kubenswrapper[4982]: E1205 20:00:00.164809 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="extract-utilities"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.164816 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="extract-utilities"
Dec 05 20:00:00 crc kubenswrapper[4982]: E1205 20:00:00.164836 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="extract-content"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.164848 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="extract-content"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.165073 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="421ab850-bc97-48cd-8a76-49f3ecbcd721" containerName="registry-server"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.165881 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.168098 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.168286 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.178529 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"]
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.216657 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.216775 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk4fp\" (UniqueName: \"kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.216801 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.318714 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.319187 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk4fp\" (UniqueName: \"kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.319230 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.319916 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.329048 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.338768 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk4fp\" (UniqueName: \"kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp\") pod \"collect-profiles-29416080-fmwrm\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.486907 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:00 crc kubenswrapper[4982]: I1205 20:00:00.986217 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"]
Dec 05 20:00:01 crc kubenswrapper[4982]: I1205 20:00:01.345310 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm" event={"ID":"02d8079d-e66d-4b57-808f-72565ebc5067","Type":"ContainerStarted","Data":"cb14117d3869763ba4fad5612cd21624f6f96682631d8f387fb405911c91996f"}
Dec 05 20:00:01 crc kubenswrapper[4982]: I1205 20:00:01.345611 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm" event={"ID":"02d8079d-e66d-4b57-808f-72565ebc5067","Type":"ContainerStarted","Data":"ff91d0a131c3ef0cfc3fd36fafed74a54002404a93976f7482ee57832725cd82"}
Dec 05 20:00:01 crc kubenswrapper[4982]: I1205 20:00:01.365785 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm" podStartSLOduration=1.365761214 podStartE2EDuration="1.365761214s" podCreationTimestamp="2025-12-05 20:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:00:01.363249061 +0000 UTC m=+2780.245135046" watchObservedRunningTime="2025-12-05 20:00:01.365761214 +0000 UTC m=+2780.247647219"
Dec 05 20:00:02 crc kubenswrapper[4982]: I1205 20:00:02.356021 4982 generic.go:334] "Generic (PLEG): container finished" podID="02d8079d-e66d-4b57-808f-72565ebc5067" containerID="cb14117d3869763ba4fad5612cd21624f6f96682631d8f387fb405911c91996f" exitCode=0
Dec 05 20:00:02 crc kubenswrapper[4982]: I1205 20:00:02.356065 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm" event={"ID":"02d8079d-e66d-4b57-808f-72565ebc5067","Type":"ContainerDied","Data":"cb14117d3869763ba4fad5612cd21624f6f96682631d8f387fb405911c91996f"}
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.874196 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.889073 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume\") pod \"02d8079d-e66d-4b57-808f-72565ebc5067\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") "
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.889257 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume\") pod \"02d8079d-e66d-4b57-808f-72565ebc5067\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") "
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.889550 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume" (OuterVolumeSpecName: "config-volume") pod "02d8079d-e66d-4b57-808f-72565ebc5067" (UID: "02d8079d-e66d-4b57-808f-72565ebc5067"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.890011 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pk4fp\" (UniqueName: \"kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp\") pod \"02d8079d-e66d-4b57-808f-72565ebc5067\" (UID: \"02d8079d-e66d-4b57-808f-72565ebc5067\") "
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.890411 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/02d8079d-e66d-4b57-808f-72565ebc5067-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.897363 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "02d8079d-e66d-4b57-808f-72565ebc5067" (UID: "02d8079d-e66d-4b57-808f-72565ebc5067"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.910660 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp" (OuterVolumeSpecName: "kube-api-access-pk4fp") pod "02d8079d-e66d-4b57-808f-72565ebc5067" (UID: "02d8079d-e66d-4b57-808f-72565ebc5067"). InnerVolumeSpecName "kube-api-access-pk4fp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.992782 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/02d8079d-e66d-4b57-808f-72565ebc5067-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:03 crc kubenswrapper[4982]: I1205 20:00:03.992847 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pk4fp\" (UniqueName: \"kubernetes.io/projected/02d8079d-e66d-4b57-808f-72565ebc5067-kube-api-access-pk4fp\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:04 crc kubenswrapper[4982]: I1205 20:00:04.376760 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm" event={"ID":"02d8079d-e66d-4b57-808f-72565ebc5067","Type":"ContainerDied","Data":"ff91d0a131c3ef0cfc3fd36fafed74a54002404a93976f7482ee57832725cd82"}
Dec 05 20:00:04 crc kubenswrapper[4982]: I1205 20:00:04.376815 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ff91d0a131c3ef0cfc3fd36fafed74a54002404a93976f7482ee57832725cd82"
Dec 05 20:00:04 crc kubenswrapper[4982]: I1205 20:00:04.376809 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416080-fmwrm"
Dec 05 20:00:04 crc kubenswrapper[4982]: I1205 20:00:04.449882 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"]
Dec 05 20:00:04 crc kubenswrapper[4982]: I1205 20:00:04.462478 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416035-vz5vw"]
Dec 05 20:00:05 crc kubenswrapper[4982]: I1205 20:00:05.403388 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2bde128-2402-4be6-bbe8-ef3518e58045" path="/var/lib/kubelet/pods/c2bde128-2402-4be6-bbe8-ef3518e58045/volumes"
Dec 05 20:00:08 crc kubenswrapper[4982]: I1205 20:00:08.419071 4982 generic.go:334] "Generic (PLEG): container finished" podID="592205cb-46f4-4bc6-9329-a90e5e63400e" containerID="79c9f013ceb70073f939ff4f53b8038f5f10c1531bf69b4a28271a14c3faf74e" exitCode=0
Dec 05 20:00:08 crc kubenswrapper[4982]: I1205 20:00:08.419143 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" event={"ID":"592205cb-46f4-4bc6-9329-a90e5e63400e","Type":"ContainerDied","Data":"79c9f013ceb70073f939ff4f53b8038f5f10c1531bf69b4a28271a14c3faf74e"}
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.053415 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.254487 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.254540 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.254598 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.254625 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.255676 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vxw5m\" (UniqueName: \"kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.255736 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.255800 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.255875 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.255988 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle\") pod \"592205cb-46f4-4bc6-9329-a90e5e63400e\" (UID: \"592205cb-46f4-4bc6-9329-a90e5e63400e\") "
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.261413 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m" (OuterVolumeSpecName: "kube-api-access-vxw5m") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "kube-api-access-vxw5m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.266375 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.286515 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.292744 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.296808 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.309262 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.309441 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.315459 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory" (OuterVolumeSpecName: "inventory") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.318515 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "592205cb-46f4-4bc6-9329-a90e5e63400e" (UID: "592205cb-46f4-4bc6-9329-a90e5e63400e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357681 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vxw5m\" (UniqueName: \"kubernetes.io/projected/592205cb-46f4-4bc6-9329-a90e5e63400e-kube-api-access-vxw5m\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357725 4982 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-extra-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357737 4982 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357749 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357760 4982 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357776 4982 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357788 4982 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357799 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.357809 4982 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/592205cb-46f4-4bc6-9329-a90e5e63400e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\""
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.452560 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7" event={"ID":"592205cb-46f4-4bc6-9329-a90e5e63400e","Type":"ContainerDied","Data":"9d6906ff5107edb990a885ba69d8b19dabd23803084ca362d4a77634c54c4d4b"}
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.452597 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-qxxl7"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.452610 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d6906ff5107edb990a885ba69d8b19dabd23803084ca362d4a77634c54c4d4b"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.558394 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"]
Dec 05 20:00:10 crc kubenswrapper[4982]: E1205 20:00:10.558832 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02d8079d-e66d-4b57-808f-72565ebc5067" containerName="collect-profiles"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.558850 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="02d8079d-e66d-4b57-808f-72565ebc5067" containerName="collect-profiles"
Dec 05 20:00:10 crc kubenswrapper[4982]: E1205 20:00:10.558878 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="592205cb-46f4-4bc6-9329-a90e5e63400e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.558885 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="592205cb-46f4-4bc6-9329-a90e5e63400e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.559073 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="02d8079d-e66d-4b57-808f-72565ebc5067" containerName="collect-profiles"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.559087 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="592205cb-46f4-4bc6-9329-a90e5e63400e" containerName="nova-edpm-deployment-openstack-edpm-ipam"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.559886 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.562437 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.563638 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.563666 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.563793 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-mfvz2"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.565381 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.583842 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"]
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665401 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665458 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665496 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjzlz\" (UniqueName: \"kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665558 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665654 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"
Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665686 4982 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.665715 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767358 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767399 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767428 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjzlz\" (UniqueName: \"kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767483 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767568 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767595 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.767622 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.770979 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.771482 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.771766 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.771890 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.773639 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.780955 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.782755 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjzlz\" (UniqueName: \"kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc\" (UID: 
\"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:10 crc kubenswrapper[4982]: I1205 20:00:10.874643 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:00:11 crc kubenswrapper[4982]: I1205 20:00:11.499992 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc"] Dec 05 20:00:12 crc kubenswrapper[4982]: I1205 20:00:12.495521 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" event={"ID":"f9f55ee0-0c0c-4edf-9b1f-17ff56560708","Type":"ContainerStarted","Data":"828fa92605a2b24e70da3abbebb3448cf37d89a0f96c2d160e6fc3b41d1c6b30"} Dec 05 20:00:12 crc kubenswrapper[4982]: I1205 20:00:12.496099 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" event={"ID":"f9f55ee0-0c0c-4edf-9b1f-17ff56560708","Type":"ContainerStarted","Data":"cb849ccf80f6a6d9d83f3e5e0ae2792c37474a15362aa21246a6f009b2505cd3"} Dec 05 20:00:12 crc kubenswrapper[4982]: I1205 20:00:12.523505 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" podStartSLOduration=2.049651039 podStartE2EDuration="2.523486813s" podCreationTimestamp="2025-12-05 20:00:10 +0000 UTC" firstStartedPulling="2025-12-05 20:00:11.507345279 +0000 UTC m=+2790.389231274" lastFinishedPulling="2025-12-05 20:00:11.981181053 +0000 UTC m=+2790.863067048" observedRunningTime="2025-12-05 20:00:12.514177968 +0000 UTC m=+2791.396063963" watchObservedRunningTime="2025-12-05 20:00:12.523486813 +0000 UTC m=+2791.405372808" Dec 05 20:00:12 crc kubenswrapper[4982]: I1205 20:00:12.558843 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:00:12 crc kubenswrapper[4982]: I1205 20:00:12.558922 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.557550 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.557978 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.558019 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 20:00:42 crc 
kubenswrapper[4982]: I1205 20:00:42.558745 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.558800 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4" gracePeriod=600 Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.801457 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4" exitCode=0 Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.801772 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4"} Dec 05 20:00:42 crc kubenswrapper[4982]: I1205 20:00:42.801809 4982 scope.go:117] "RemoveContainer" containerID="51b22a64e11bb7482668d7165c7c7ba92821e3344dadb508b863cf1d20b1afcf" Dec 05 20:00:43 crc kubenswrapper[4982]: I1205 20:00:43.817535 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3"} Dec 05 20:00:51 crc kubenswrapper[4982]: I1205 20:00:51.283619 4982 scope.go:117] "RemoveContainer" containerID="06367966c92903df86409e4f479a9a62382fb13013625ff88736b2e844d40eb1" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.161343 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29416081-4j7pq"] Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.163446 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.176342 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29416081-4j7pq"] Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.328942 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.329201 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.329341 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lvxp\" (UniqueName: \"kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.329414 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.431538 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.431598 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.431631 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lvxp\" (UniqueName: \"kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.431655 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.440048 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.440176 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.453611 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.463977 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lvxp\" (UniqueName: \"kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp\") pod \"keystone-cron-29416081-4j7pq\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:00 crc kubenswrapper[4982]: I1205 20:01:00.490203 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:01 crc kubenswrapper[4982]: I1205 20:01:01.019374 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29416081-4j7pq"] Dec 05 20:01:02 crc kubenswrapper[4982]: I1205 20:01:02.001960 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416081-4j7pq" event={"ID":"76cb36b1-d2a2-4742-8886-fc3305fb4082","Type":"ContainerStarted","Data":"ddba92a5182417bb56993b8338e7d5a425b7a0ec5253bbdf2077e24b3dcbf977"} Dec 05 20:01:02 crc kubenswrapper[4982]: I1205 20:01:02.002293 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416081-4j7pq" event={"ID":"76cb36b1-d2a2-4742-8886-fc3305fb4082","Type":"ContainerStarted","Data":"ed9ce05cd0ea173e3ad2835a2008f8bc6ec08cb1daa19198adf89b60d3c44c73"} Dec 05 20:01:02 crc kubenswrapper[4982]: I1205 20:01:02.035002 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29416081-4j7pq" podStartSLOduration=2.034980824 podStartE2EDuration="2.034980824s" podCreationTimestamp="2025-12-05 20:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:01:02.022488469 +0000 UTC m=+2840.904374464" watchObservedRunningTime="2025-12-05 20:01:02.034980824 +0000 UTC m=+2840.916866819" Dec 05 20:01:04 crc kubenswrapper[4982]: I1205 20:01:04.060712 4982 generic.go:334] "Generic (PLEG): container finished" podID="76cb36b1-d2a2-4742-8886-fc3305fb4082" containerID="ddba92a5182417bb56993b8338e7d5a425b7a0ec5253bbdf2077e24b3dcbf977" exitCode=0 Dec 05 20:01:04 crc kubenswrapper[4982]: I1205 20:01:04.060787 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416081-4j7pq" event={"ID":"76cb36b1-d2a2-4742-8886-fc3305fb4082","Type":"ContainerDied","Data":"ddba92a5182417bb56993b8338e7d5a425b7a0ec5253bbdf2077e24b3dcbf977"} Dec 05 20:01:05 crc kubenswrapper[4982]: 
I1205 20:01:05.538782 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.652192 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle\") pod \"76cb36b1-d2a2-4742-8886-fc3305fb4082\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.652292 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lvxp\" (UniqueName: \"kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp\") pod \"76cb36b1-d2a2-4742-8886-fc3305fb4082\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.652351 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys\") pod \"76cb36b1-d2a2-4742-8886-fc3305fb4082\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.652476 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data\") pod \"76cb36b1-d2a2-4742-8886-fc3305fb4082\" (UID: \"76cb36b1-d2a2-4742-8886-fc3305fb4082\") " Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.658797 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp" (OuterVolumeSpecName: "kube-api-access-7lvxp") pod "76cb36b1-d2a2-4742-8886-fc3305fb4082" (UID: "76cb36b1-d2a2-4742-8886-fc3305fb4082"). InnerVolumeSpecName "kube-api-access-7lvxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.659402 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "76cb36b1-d2a2-4742-8886-fc3305fb4082" (UID: "76cb36b1-d2a2-4742-8886-fc3305fb4082"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.691767 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76cb36b1-d2a2-4742-8886-fc3305fb4082" (UID: "76cb36b1-d2a2-4742-8886-fc3305fb4082"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.736904 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data" (OuterVolumeSpecName: "config-data") pod "76cb36b1-d2a2-4742-8886-fc3305fb4082" (UID: "76cb36b1-d2a2-4742-8886-fc3305fb4082"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.757298 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lvxp\" (UniqueName: \"kubernetes.io/projected/76cb36b1-d2a2-4742-8886-fc3305fb4082-kube-api-access-7lvxp\") on node \"crc\" DevicePath \"\"" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.757333 4982 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.757344 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 20:01:05 crc kubenswrapper[4982]: I1205 20:01:05.757352 4982 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76cb36b1-d2a2-4742-8886-fc3305fb4082-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 20:01:06 crc kubenswrapper[4982]: I1205 20:01:06.085620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29416081-4j7pq" event={"ID":"76cb36b1-d2a2-4742-8886-fc3305fb4082","Type":"ContainerDied","Data":"ed9ce05cd0ea173e3ad2835a2008f8bc6ec08cb1daa19198adf89b60d3c44c73"} Dec 05 20:01:06 crc kubenswrapper[4982]: I1205 20:01:06.085934 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed9ce05cd0ea173e3ad2835a2008f8bc6ec08cb1daa19198adf89b60d3c44c73" Dec 05 20:01:06 crc kubenswrapper[4982]: I1205 20:01:06.085689 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29416081-4j7pq" Dec 05 20:02:36 crc kubenswrapper[4982]: I1205 20:02:36.019622 4982 generic.go:334] "Generic (PLEG): container finished" podID="f9f55ee0-0c0c-4edf-9b1f-17ff56560708" containerID="828fa92605a2b24e70da3abbebb3448cf37d89a0f96c2d160e6fc3b41d1c6b30" exitCode=0 Dec 05 20:02:36 crc kubenswrapper[4982]: I1205 20:02:36.019767 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" event={"ID":"f9f55ee0-0c0c-4edf-9b1f-17ff56560708","Type":"ContainerDied","Data":"828fa92605a2b24e70da3abbebb3448cf37d89a0f96c2d160e6fc3b41d1c6b30"} Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.506591 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.567646 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.567736 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.567823 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.567987 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.568100 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.568296 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjzlz\" (UniqueName: \"kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.568408 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2\") pod \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\" (UID: \"f9f55ee0-0c0c-4edf-9b1f-17ff56560708\") " Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.585487 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz" (OuterVolumeSpecName: "kube-api-access-rjzlz") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "kube-api-access-rjzlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.585491 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). 
InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.599575 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.607786 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.609365 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.610923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory" (OuterVolumeSpecName: "inventory") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.615598 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "f9f55ee0-0c0c-4edf-9b1f-17ff56560708" (UID: "f9f55ee0-0c0c-4edf-9b1f-17ff56560708"). InnerVolumeSpecName "ceilometer-compute-config-data-1". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672220 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672335 4982 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672400 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672420 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672495 4982 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672521 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjzlz\" (UniqueName: \"kubernetes.io/projected/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-kube-api-access-rjzlz\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:37 crc kubenswrapper[4982]: I1205 20:02:37.672544 4982 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/f9f55ee0-0c0c-4edf-9b1f-17ff56560708-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 20:02:38 crc kubenswrapper[4982]: I1205 20:02:38.041014 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" event={"ID":"f9f55ee0-0c0c-4edf-9b1f-17ff56560708","Type":"ContainerDied","Data":"cb849ccf80f6a6d9d83f3e5e0ae2792c37474a15362aa21246a6f009b2505cd3"} Dec 05 20:02:38 crc kubenswrapper[4982]: I1205 20:02:38.041311 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb849ccf80f6a6d9d83f3e5e0ae2792c37474a15362aa21246a6f009b2505cd3" Dec 05 20:02:38 crc kubenswrapper[4982]: I1205 20:02:38.041117 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc" Dec 05 20:02:38 crc kubenswrapper[4982]: E1205 20:02:38.204580 4982 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9f55ee0_0c0c_4edf_9b1f_17ff56560708.slice/crio-cb849ccf80f6a6d9d83f3e5e0ae2792c37474a15362aa21246a6f009b2505cd3\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf9f55ee0_0c0c_4edf_9b1f_17ff56560708.slice\": RecentStats: unable to find data in memory cache]" Dec 05 20:02:42 crc kubenswrapper[4982]: I1205 20:02:42.562570 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:02:42 crc kubenswrapper[4982]: I1205 20:02:42.563359 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:03:12 crc kubenswrapper[4982]: I1205 20:03:12.557923 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:03:12 crc kubenswrapper[4982]: I1205 20:03:12.558998 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.562708 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.563522 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.563593 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.564753 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 
05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.564880 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" gracePeriod=600 Dec 05 20:03:42 crc kubenswrapper[4982]: E1205 20:03:42.695787 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.777223 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" exitCode=0 Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.777276 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3"} Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.777315 4982 scope.go:117] "RemoveContainer" containerID="461731dbe04cb67fe028db49bb8a95b2134c2a64fb1c09002ce2e414c01ad2d4" Dec 05 20:03:42 crc kubenswrapper[4982]: I1205 20:03:42.777979 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:03:42 crc kubenswrapper[4982]: E1205 20:03:42.778247 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:03:58 crc kubenswrapper[4982]: I1205 20:03:58.390506 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:03:58 crc kubenswrapper[4982]: E1205 20:03:58.391354 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.757311 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 20:04:03 crc kubenswrapper[4982]: E1205 20:04:03.758292 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f55ee0-0c0c-4edf-9b1f-17ff56560708" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.758307 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f55ee0-0c0c-4edf-9b1f-17ff56560708" 
containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 20:04:03 crc kubenswrapper[4982]: E1205 20:04:03.758356 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76cb36b1-d2a2-4742-8886-fc3305fb4082" containerName="keystone-cron" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.758362 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="76cb36b1-d2a2-4742-8886-fc3305fb4082" containerName="keystone-cron" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.758558 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f55ee0-0c0c-4edf-9b1f-17ff56560708" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.758574 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="76cb36b1-d2a2-4742-8886-fc3305fb4082" containerName="keystone-cron" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.759315 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.763325 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.763381 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.763828 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-lld7m" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.764389 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.791627 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.893917 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.893974 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894004 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894031 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnbfx\" (UniqueName: \"kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " 
pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894264 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894536 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894579 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894663 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.894706 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996108 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996254 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996287 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996308 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " 
pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996329 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnbfx\" (UniqueName: \"kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996380 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996481 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996524 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.996892 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.997184 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.997713 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.997862 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:03 crc kubenswrapper[4982]: I1205 20:04:03.998366 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.002798 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.003519 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.017907 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.019547 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnbfx\" (UniqueName: \"kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.029666 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"tempest-tests-tempest\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.095786 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.559830 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 20:04:04 crc kubenswrapper[4982]: I1205 20:04:04.565511 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 20:04:05 crc kubenswrapper[4982]: I1205 20:04:05.024614 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d3c6fe00-7794-450c-a588-bd1e2afdbe8c","Type":"ContainerStarted","Data":"128d73fccdc125c66e43bbd90a22d1fb80ebf168e0713346da88f80ddb0cd2b6"} Dec 05 20:04:10 crc kubenswrapper[4982]: I1205 20:04:10.390323 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:04:10 crc kubenswrapper[4982]: E1205 20:04:10.391547 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:04:25 crc kubenswrapper[4982]: I1205 20:04:25.390305 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:04:25 crc kubenswrapper[4982]: E1205 20:04:25.391074 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:04:34 crc kubenswrapper[4982]: E1205 20:04:34.308795 4982 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 05 20:04:34 crc kubenswrapper[4982]: E1205 20:04:34.311239 4982 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pnbfx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(d3c6fe00-7794-450c-a588-bd1e2afdbe8c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 20:04:34 crc kubenswrapper[4982]: E1205 20:04:34.313747 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" Dec 05 20:04:34 crc kubenswrapper[4982]: E1205 20:04:34.328234 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" Dec 05 20:04:40 crc kubenswrapper[4982]: I1205 20:04:40.391473 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:04:40 crc kubenswrapper[4982]: E1205 20:04:40.392277 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:04:49 crc kubenswrapper[4982]: I1205 20:04:49.838607 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 20:04:51 crc kubenswrapper[4982]: I1205 20:04:51.401720 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:04:51 crc kubenswrapper[4982]: E1205 20:04:51.402294 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:04:51 crc kubenswrapper[4982]: I1205 20:04:51.557407 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d3c6fe00-7794-450c-a588-bd1e2afdbe8c","Type":"ContainerStarted","Data":"5b686aaae7f072938b0d7e61caaf6692e537d67c67f67730bc52968ae362dd69"} Dec 05 20:04:51 crc kubenswrapper[4982]: I1205 20:04:51.598627 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=4.321967116 podStartE2EDuration="49.598601566s" podCreationTimestamp="2025-12-05 20:04:02 +0000 UTC" firstStartedPulling="2025-12-05 20:04:04.559641897 +0000 UTC m=+3023.441527892" lastFinishedPulling="2025-12-05 20:04:49.836276347 +0000 UTC m=+3068.718162342" observedRunningTime="2025-12-05 20:04:51.589200707 +0000 UTC m=+3070.471086702" watchObservedRunningTime="2025-12-05 20:04:51.598601566 +0000 UTC m=+3070.480487561" Dec 05 20:05:04 crc kubenswrapper[4982]: I1205 20:05:04.390955 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:05:04 crc kubenswrapper[4982]: E1205 20:05:04.391648 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" 
podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:05:15 crc kubenswrapper[4982]: I1205 20:05:15.390136 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:05:15 crc kubenswrapper[4982]: E1205 20:05:15.390968 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:05:30 crc kubenswrapper[4982]: I1205 20:05:30.391164 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:05:30 crc kubenswrapper[4982]: E1205 20:05:30.392029 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:05:45 crc kubenswrapper[4982]: I1205 20:05:45.391065 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:05:45 crc kubenswrapper[4982]: E1205 20:05:45.391822 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:05:56 crc kubenswrapper[4982]: I1205 20:05:56.396072 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:05:56 crc kubenswrapper[4982]: E1205 20:05:56.397050 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:06:09 crc kubenswrapper[4982]: I1205 20:06:09.392483 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:06:09 crc kubenswrapper[4982]: E1205 20:06:09.393229 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.271062 4982 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.274486 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.283101 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.448134 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcrxc\" (UniqueName: \"kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.448386 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.448426 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.468785 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.474372 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.490186 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.549944 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.550015 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.550052 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcrxc\" (UniqueName: \"kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.550756 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.550867 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.572105 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcrxc\" (UniqueName: \"kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc\") pod \"certified-operators-7pvr5\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.601847 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.652588 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.652723 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.652817 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hsw5\" (UniqueName: \"kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.755695 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.756074 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.756134 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hsw5\" (UniqueName: \"kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.757272 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.757483 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content\") pod \"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.794107 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hsw5\" (UniqueName: \"kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5\") pod 
\"redhat-marketplace-x6l8s\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:16 crc kubenswrapper[4982]: I1205 20:06:16.795292 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:17 crc kubenswrapper[4982]: I1205 20:06:17.261126 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:17 crc kubenswrapper[4982]: I1205 20:06:17.430853 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerStarted","Data":"dc19fe06102c157136304f5266d4ba8a8ec04cc71a209b2231f50f74fe278dc9"} Dec 05 20:06:17 crc kubenswrapper[4982]: W1205 20:06:17.566515 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe9e7a8c_d8e1_46aa_b46a_55775db5b335.slice/crio-fdcc9751a4ea055f8ddfec0658ce0d80f67df9233cf00f506e3859e9339006fb WatchSource:0}: Error finding container fdcc9751a4ea055f8ddfec0658ce0d80f67df9233cf00f506e3859e9339006fb: Status 404 returned error can't find the container with id fdcc9751a4ea055f8ddfec0658ce0d80f67df9233cf00f506e3859e9339006fb Dec 05 20:06:17 crc kubenswrapper[4982]: I1205 20:06:17.571651 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.442488 4982 generic.go:334] "Generic (PLEG): container finished" podID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerID="08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d" exitCode=0 Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.442568 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerDied","Data":"08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d"} Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.442742 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerStarted","Data":"fdcc9751a4ea055f8ddfec0658ce0d80f67df9233cf00f506e3859e9339006fb"} Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.449190 4982 generic.go:334] "Generic (PLEG): container finished" podID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerID="aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f" exitCode=0 Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.449240 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerDied","Data":"aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f"} Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.674217 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.676856 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.758441 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.806841 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.806899 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.807022 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8skb\" (UniqueName: \"kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.908368 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.908429 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.908521 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8skb\" (UniqueName: \"kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.909245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.909449 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.925960 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-d8skb\" (UniqueName: \"kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb\") pod \"community-operators-2t2qx\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:18 crc kubenswrapper[4982]: I1205 20:06:18.995016 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:19 crc kubenswrapper[4982]: I1205 20:06:19.460281 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerStarted","Data":"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7"} Dec 05 20:06:19 crc kubenswrapper[4982]: I1205 20:06:19.471367 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerStarted","Data":"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad"} Dec 05 20:06:19 crc kubenswrapper[4982]: I1205 20:06:19.631851 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:19 crc kubenswrapper[4982]: W1205 20:06:19.632630 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c1e3e6a_59d1_4c2c_95f0_ad5554dc0b14.slice/crio-c5774baa890e3b3feb7a95a3c84d062de9972975a287e59e2aa309df9a7193d2 WatchSource:0}: Error finding container c5774baa890e3b3feb7a95a3c84d062de9972975a287e59e2aa309df9a7193d2: Status 404 returned error can't find the container with id c5774baa890e3b3feb7a95a3c84d062de9972975a287e59e2aa309df9a7193d2 Dec 05 20:06:20 crc kubenswrapper[4982]: I1205 20:06:20.483086 4982 generic.go:334] "Generic (PLEG): container finished" podID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerID="d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7" exitCode=0 Dec 05 20:06:20 crc kubenswrapper[4982]: I1205 20:06:20.483192 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerDied","Data":"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7"} Dec 05 20:06:20 crc kubenswrapper[4982]: I1205 20:06:20.489093 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerID="4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf" exitCode=0 Dec 05 20:06:20 crc kubenswrapper[4982]: I1205 20:06:20.490170 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerDied","Data":"4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf"} Dec 05 20:06:20 crc kubenswrapper[4982]: I1205 20:06:20.490215 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerStarted","Data":"c5774baa890e3b3feb7a95a3c84d062de9972975a287e59e2aa309df9a7193d2"} Dec 05 20:06:21 crc kubenswrapper[4982]: I1205 20:06:21.502459 4982 generic.go:334] "Generic (PLEG): container finished" podID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" 
containerID="c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad" exitCode=0 Dec 05 20:06:21 crc kubenswrapper[4982]: I1205 20:06:21.502529 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerDied","Data":"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad"} Dec 05 20:06:22 crc kubenswrapper[4982]: I1205 20:06:22.515470 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerStarted","Data":"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25"} Dec 05 20:06:22 crc kubenswrapper[4982]: I1205 20:06:22.517549 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerStarted","Data":"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce"} Dec 05 20:06:22 crc kubenswrapper[4982]: I1205 20:06:22.537614 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x6l8s" podStartSLOduration=3.854163909 podStartE2EDuration="6.537594304s" podCreationTimestamp="2025-12-05 20:06:16 +0000 UTC" firstStartedPulling="2025-12-05 20:06:18.445187749 +0000 UTC m=+3157.327073754" lastFinishedPulling="2025-12-05 20:06:21.128618154 +0000 UTC m=+3160.010504149" observedRunningTime="2025-12-05 20:06:22.533022232 +0000 UTC m=+3161.414908227" watchObservedRunningTime="2025-12-05 20:06:22.537594304 +0000 UTC m=+3161.419480299" Dec 05 20:06:23 crc kubenswrapper[4982]: I1205 20:06:23.529018 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerID="f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce" exitCode=0 Dec 05 20:06:23 crc kubenswrapper[4982]: I1205 20:06:23.529125 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerDied","Data":"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce"} Dec 05 20:06:23 crc kubenswrapper[4982]: I1205 20:06:23.534853 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerStarted","Data":"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e"} Dec 05 20:06:23 crc kubenswrapper[4982]: I1205 20:06:23.579482 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7pvr5" podStartSLOduration=3.873428316 podStartE2EDuration="7.579457298s" podCreationTimestamp="2025-12-05 20:06:16 +0000 UTC" firstStartedPulling="2025-12-05 20:06:18.452027465 +0000 UTC m=+3157.333913460" lastFinishedPulling="2025-12-05 20:06:22.158056447 +0000 UTC m=+3161.039942442" observedRunningTime="2025-12-05 20:06:23.572672363 +0000 UTC m=+3162.454558368" watchObservedRunningTime="2025-12-05 20:06:23.579457298 +0000 UTC m=+3162.461343293" Dec 05 20:06:24 crc kubenswrapper[4982]: I1205 20:06:24.392019 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:06:24 crc kubenswrapper[4982]: E1205 20:06:24.393084 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:06:24 crc kubenswrapper[4982]: I1205 20:06:24.555024 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerStarted","Data":"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883"} Dec 05 20:06:24 crc kubenswrapper[4982]: I1205 20:06:24.585378 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2t2qx" podStartSLOduration=3.184537794 podStartE2EDuration="6.585353918s" podCreationTimestamp="2025-12-05 20:06:18 +0000 UTC" firstStartedPulling="2025-12-05 20:06:20.492191425 +0000 UTC m=+3159.374077420" lastFinishedPulling="2025-12-05 20:06:23.893007539 +0000 UTC m=+3162.774893544" observedRunningTime="2025-12-05 20:06:24.57351217 +0000 UTC m=+3163.455398175" watchObservedRunningTime="2025-12-05 20:06:24.585353918 +0000 UTC m=+3163.467239923" Dec 05 20:06:26 crc kubenswrapper[4982]: I1205 20:06:26.636692 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:26 crc kubenswrapper[4982]: I1205 20:06:26.637108 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:26 crc kubenswrapper[4982]: I1205 20:06:26.796033 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:26 crc kubenswrapper[4982]: I1205 20:06:26.796099 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:26 crc kubenswrapper[4982]: I1205 20:06:26.842385 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:27 crc kubenswrapper[4982]: I1205 20:06:27.693420 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-7pvr5" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="registry-server" probeResult="failure" output=< Dec 05 20:06:27 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 20:06:27 crc kubenswrapper[4982]: > Dec 05 20:06:27 crc kubenswrapper[4982]: I1205 20:06:27.831289 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:28 crc kubenswrapper[4982]: I1205 20:06:28.996009 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:28 crc kubenswrapper[4982]: I1205 20:06:28.997095 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:29 crc kubenswrapper[4982]: I1205 20:06:29.069874 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:29 crc kubenswrapper[4982]: I1205 20:06:29.842717 4982 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:29 crc kubenswrapper[4982]: I1205 20:06:29.864212 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:29 crc kubenswrapper[4982]: I1205 20:06:29.864614 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x6l8s" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="registry-server" containerID="cri-o://4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25" gracePeriod=2 Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.713030 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.834616 4982 generic.go:334] "Generic (PLEG): container finished" podID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerID="4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25" exitCode=0 Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.835238 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerDied","Data":"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25"} Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.835294 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x6l8s" event={"ID":"be9e7a8c-d8e1-46aa-b46a-55775db5b335","Type":"ContainerDied","Data":"fdcc9751a4ea055f8ddfec0658ce0d80f67df9233cf00f506e3859e9339006fb"} Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.835317 4982 scope.go:117] "RemoveContainer" containerID="4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.835325 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x6l8s" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.849600 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hsw5\" (UniqueName: \"kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5\") pod \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.849662 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities\") pod \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.849896 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content\") pod \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\" (UID: \"be9e7a8c-d8e1-46aa-b46a-55775db5b335\") " Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.853322 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities" (OuterVolumeSpecName: "utilities") pod "be9e7a8c-d8e1-46aa-b46a-55775db5b335" (UID: "be9e7a8c-d8e1-46aa-b46a-55775db5b335"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.874189 4982 scope.go:117] "RemoveContainer" containerID="d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.888923 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5" (OuterVolumeSpecName: "kube-api-access-6hsw5") pod "be9e7a8c-d8e1-46aa-b46a-55775db5b335" (UID: "be9e7a8c-d8e1-46aa-b46a-55775db5b335"). InnerVolumeSpecName "kube-api-access-6hsw5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.892548 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "be9e7a8c-d8e1-46aa-b46a-55775db5b335" (UID: "be9e7a8c-d8e1-46aa-b46a-55775db5b335"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.945000 4982 scope.go:117] "RemoveContainer" containerID="08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.953407 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.953450 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hsw5\" (UniqueName: \"kubernetes.io/projected/be9e7a8c-d8e1-46aa-b46a-55775db5b335-kube-api-access-6hsw5\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.953468 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/be9e7a8c-d8e1-46aa-b46a-55775db5b335-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.985102 4982 scope.go:117] "RemoveContainer" containerID="4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25" Dec 05 20:06:30 crc kubenswrapper[4982]: E1205 20:06:30.985594 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25\": container with ID starting with 4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25 not found: ID does not exist" containerID="4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.985630 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25"} err="failed to get container status \"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25\": rpc error: code = NotFound desc = could not find container \"4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25\": container with ID starting with 4e6bc9536b545ecfa085ee573d87870d629eab38bd3488cb373570684dc65d25 not found: ID does not exist" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.985657 4982 scope.go:117] "RemoveContainer" containerID="d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7" Dec 05 
20:06:30 crc kubenswrapper[4982]: E1205 20:06:30.986019 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7\": container with ID starting with d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7 not found: ID does not exist" containerID="d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.986049 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7"} err="failed to get container status \"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7\": rpc error: code = NotFound desc = could not find container \"d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7\": container with ID starting with d88763153d95a06c1ff5e0d8b958497408268cc78a33bab9ac7b9f79b647afc7 not found: ID does not exist" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.986075 4982 scope.go:117] "RemoveContainer" containerID="08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d" Dec 05 20:06:30 crc kubenswrapper[4982]: E1205 20:06:30.986533 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d\": container with ID starting with 08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d not found: ID does not exist" containerID="08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d" Dec 05 20:06:30 crc kubenswrapper[4982]: I1205 20:06:30.986560 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d"} err="failed to get container status \"08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d\": rpc error: code = NotFound desc = could not find container \"08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d\": container with ID starting with 08eb7fbc220d3278d3dc82e11dc70654400f300c3111635792cf3b720bac369d not found: ID does not exist" Dec 05 20:06:31 crc kubenswrapper[4982]: I1205 20:06:31.167072 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:31 crc kubenswrapper[4982]: I1205 20:06:31.177038 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x6l8s"] Dec 05 20:06:31 crc kubenswrapper[4982]: I1205 20:06:31.407341 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" path="/var/lib/kubelet/pods/be9e7a8c-d8e1-46aa-b46a-55775db5b335/volumes" Dec 05 20:06:31 crc kubenswrapper[4982]: I1205 20:06:31.666247 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:32 crc kubenswrapper[4982]: I1205 20:06:32.853442 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2t2qx" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="registry-server" containerID="cri-o://c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883" gracePeriod=2 Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.570235 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.730968 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities\") pod \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.731201 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content\") pod \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.731318 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8skb\" (UniqueName: \"kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb\") pod \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\" (UID: \"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14\") " Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.732125 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities" (OuterVolumeSpecName: "utilities") pod "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" (UID: "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.736596 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb" (OuterVolumeSpecName: "kube-api-access-d8skb") pod "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" (UID: "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14"). InnerVolumeSpecName "kube-api-access-d8skb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.788586 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" (UID: "9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.834970 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.835030 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.835047 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8skb\" (UniqueName: \"kubernetes.io/projected/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14-kube-api-access-d8skb\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.864814 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerID="c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883" exitCode=0 Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.864857 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerDied","Data":"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883"} Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.864898 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2t2qx" event={"ID":"9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14","Type":"ContainerDied","Data":"c5774baa890e3b3feb7a95a3c84d062de9972975a287e59e2aa309df9a7193d2"} Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.864906 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2t2qx" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.864916 4982 scope.go:117] "RemoveContainer" containerID="c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.906284 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.909283 4982 scope.go:117] "RemoveContainer" containerID="f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.919875 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2t2qx"] Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.936266 4982 scope.go:117] "RemoveContainer" containerID="4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.985809 4982 scope.go:117] "RemoveContainer" containerID="c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883" Dec 05 20:06:33 crc kubenswrapper[4982]: E1205 20:06:33.986439 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883\": container with ID starting with c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883 not found: ID does not exist" containerID="c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.986484 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883"} err="failed to get container status \"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883\": rpc error: code = NotFound desc = could not find container \"c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883\": container with ID starting with c6b106c4dc21ebd3ed25ca513e85f7f549d6452b5bae06283df75b22f8c14883 not found: ID does not exist" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.986527 4982 scope.go:117] "RemoveContainer" containerID="f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce" Dec 05 20:06:33 crc kubenswrapper[4982]: E1205 20:06:33.987181 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce\": container with ID starting with f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce not found: ID does not exist" containerID="f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.987227 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce"} err="failed to get container status \"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce\": rpc error: code = NotFound desc = could not find container \"f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce\": container with ID starting with f8022342a62600dbcb6a17131cbfb992684d8df73d019e10b36bc773176485ce not found: ID does not exist" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.987284 4982 scope.go:117] "RemoveContainer" 
containerID="4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf" Dec 05 20:06:33 crc kubenswrapper[4982]: E1205 20:06:33.988193 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf\": container with ID starting with 4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf not found: ID does not exist" containerID="4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf" Dec 05 20:06:33 crc kubenswrapper[4982]: I1205 20:06:33.988248 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf"} err="failed to get container status \"4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf\": rpc error: code = NotFound desc = could not find container \"4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf\": container with ID starting with 4ca60a81f870fbb7fcdc9b610fa9947e806e7d8d2d4aaa60bb4c969507a47caf not found: ID does not exist" Dec 05 20:06:35 crc kubenswrapper[4982]: I1205 20:06:35.404629 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" path="/var/lib/kubelet/pods/9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14/volumes" Dec 05 20:06:36 crc kubenswrapper[4982]: I1205 20:06:36.655365 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:36 crc kubenswrapper[4982]: I1205 20:06:36.722759 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:37 crc kubenswrapper[4982]: I1205 20:06:37.062978 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:37 crc kubenswrapper[4982]: I1205 20:06:37.907742 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7pvr5" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="registry-server" containerID="cri-o://c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e" gracePeriod=2 Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.390169 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:06:38 crc kubenswrapper[4982]: E1205 20:06:38.391191 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.788443 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.843109 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content\") pod \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.843213 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcrxc\" (UniqueName: \"kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc\") pod \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.843371 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities\") pod \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\" (UID: \"eb6065cb-11b3-46dc-a1af-7116d6a385e8\") " Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.846469 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities" (OuterVolumeSpecName: "utilities") pod "eb6065cb-11b3-46dc-a1af-7116d6a385e8" (UID: "eb6065cb-11b3-46dc-a1af-7116d6a385e8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.854678 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc" (OuterVolumeSpecName: "kube-api-access-hcrxc") pod "eb6065cb-11b3-46dc-a1af-7116d6a385e8" (UID: "eb6065cb-11b3-46dc-a1af-7116d6a385e8"). InnerVolumeSpecName "kube-api-access-hcrxc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.913423 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb6065cb-11b3-46dc-a1af-7116d6a385e8" (UID: "eb6065cb-11b3-46dc-a1af-7116d6a385e8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.932984 4982 generic.go:334] "Generic (PLEG): container finished" podID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerID="c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e" exitCode=0 Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.933266 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerDied","Data":"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e"} Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.933436 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7pvr5" event={"ID":"eb6065cb-11b3-46dc-a1af-7116d6a385e8","Type":"ContainerDied","Data":"dc19fe06102c157136304f5266d4ba8a8ec04cc71a209b2231f50f74fe278dc9"} Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.933465 4982 scope.go:117] "RemoveContainer" containerID="c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.933272 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7pvr5" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.945568 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.945596 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcrxc\" (UniqueName: \"kubernetes.io/projected/eb6065cb-11b3-46dc-a1af-7116d6a385e8-kube-api-access-hcrxc\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.945605 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb6065cb-11b3-46dc-a1af-7116d6a385e8-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.970778 4982 scope.go:117] "RemoveContainer" containerID="c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad" Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.971378 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.981530 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7pvr5"] Dec 05 20:06:38 crc kubenswrapper[4982]: I1205 20:06:38.995391 4982 scope.go:117] "RemoveContainer" containerID="aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.044927 4982 scope.go:117] "RemoveContainer" containerID="c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e" Dec 05 20:06:39 crc kubenswrapper[4982]: E1205 20:06:39.046784 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e\": container with ID starting with c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e not found: ID does not exist" containerID="c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.046815 
4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e"} err="failed to get container status \"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e\": rpc error: code = NotFound desc = could not find container \"c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e\": container with ID starting with c428e66b12c8cf15fa4bfc240bc57c99732a25b1ad28b6196bdfd13e3bb6db9e not found: ID does not exist" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.046839 4982 scope.go:117] "RemoveContainer" containerID="c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad" Dec 05 20:06:39 crc kubenswrapper[4982]: E1205 20:06:39.047238 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad\": container with ID starting with c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad not found: ID does not exist" containerID="c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.047270 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad"} err="failed to get container status \"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad\": rpc error: code = NotFound desc = could not find container \"c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad\": container with ID starting with c6ba5219de74a0adbc86abf32d8c90d4a315ce1f5aec593a01358ac16c9241ad not found: ID does not exist" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.047292 4982 scope.go:117] "RemoveContainer" containerID="aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f" Dec 05 20:06:39 crc kubenswrapper[4982]: E1205 20:06:39.048218 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f\": container with ID starting with aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f not found: ID does not exist" containerID="aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.048244 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f"} err="failed to get container status \"aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f\": rpc error: code = NotFound desc = could not find container \"aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f\": container with ID starting with aeac882d757ae9292adf719f8d442927dd4725e43fdc141a96a697f93d4f600f not found: ID does not exist" Dec 05 20:06:39 crc kubenswrapper[4982]: I1205 20:06:39.413784 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" path="/var/lib/kubelet/pods/eb6065cb-11b3-46dc-a1af-7116d6a385e8/volumes" Dec 05 20:06:49 crc kubenswrapper[4982]: I1205 20:06:49.390875 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:06:49 crc kubenswrapper[4982]: E1205 20:06:49.391665 4982 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:07:02 crc kubenswrapper[4982]: I1205 20:07:02.391188 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:07:02 crc kubenswrapper[4982]: E1205 20:07:02.391898 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:07:13 crc kubenswrapper[4982]: I1205 20:07:13.392982 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:07:13 crc kubenswrapper[4982]: E1205 20:07:13.393934 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:07:24 crc kubenswrapper[4982]: I1205 20:07:24.390516 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:07:24 crc kubenswrapper[4982]: E1205 20:07:24.391437 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:07:36 crc kubenswrapper[4982]: I1205 20:07:36.390233 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:07:36 crc kubenswrapper[4982]: E1205 20:07:36.391004 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:07:50 crc kubenswrapper[4982]: I1205 20:07:50.391573 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:07:50 crc kubenswrapper[4982]: E1205 20:07:50.392284 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:08:01 crc kubenswrapper[4982]: I1205 20:08:01.397631 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:08:01 crc kubenswrapper[4982]: E1205 20:08:01.398347 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:08:14 crc kubenswrapper[4982]: I1205 20:08:14.390928 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:08:14 crc kubenswrapper[4982]: E1205 20:08:14.391646 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:08:25 crc kubenswrapper[4982]: I1205 20:08:25.391360 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:08:25 crc kubenswrapper[4982]: E1205 20:08:25.392425 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:08:38 crc kubenswrapper[4982]: I1205 20:08:38.390606 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:08:38 crc kubenswrapper[4982]: E1205 20:08:38.391432 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:08:50 crc kubenswrapper[4982]: I1205 20:08:50.390520 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3" Dec 05 20:08:51 crc kubenswrapper[4982]: I1205 20:08:51.356627 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf"} Dec 05 20:09:40 crc kubenswrapper[4982]: I1205 20:09:40.843595 4982 generic.go:334] "Generic (PLEG): container finished" 
podID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" containerID="5b686aaae7f072938b0d7e61caaf6692e537d67c67f67730bc52968ae362dd69" exitCode=0 Dec 05 20:09:40 crc kubenswrapper[4982]: I1205 20:09:40.843632 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d3c6fe00-7794-450c-a588-bd1e2afdbe8c","Type":"ContainerDied","Data":"5b686aaae7f072938b0d7e61caaf6692e537d67c67f67730bc52968ae362dd69"} Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.453117 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521417 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521527 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521553 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521614 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pnbfx\" (UniqueName: \"kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521635 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521685 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521733 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521752 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.521831 4982 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir\") pod \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\" (UID: \"d3c6fe00-7794-450c-a588-bd1e2afdbe8c\") " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.522834 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data" (OuterVolumeSpecName: "config-data") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.528836 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.538250 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "test-operator-logs") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.549081 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx" (OuterVolumeSpecName: "kube-api-access-pnbfx") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "kube-api-access-pnbfx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.585435 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.589515 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "ca-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625523 4982 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625553 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625564 4982 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625572 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pnbfx\" (UniqueName: \"kubernetes.io/projected/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-kube-api-access-pnbfx\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625582 4982 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.625591 4982 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.660742 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.667875 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.683277 4982 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.727513 4982 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.727547 4982 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.727559 4982 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.867437 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"d3c6fe00-7794-450c-a588-bd1e2afdbe8c","Type":"ContainerDied","Data":"128d73fccdc125c66e43bbd90a22d1fb80ebf168e0713346da88f80ddb0cd2b6"} Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.867500 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="128d73fccdc125c66e43bbd90a22d1fb80ebf168e0713346da88f80ddb0cd2b6" Dec 05 20:09:42 crc kubenswrapper[4982]: I1205 20:09:42.867574 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 20:09:43 crc kubenswrapper[4982]: I1205 20:09:43.059498 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "d3c6fe00-7794-450c-a588-bd1e2afdbe8c" (UID: "d3c6fe00-7794-450c-a588-bd1e2afdbe8c"). InnerVolumeSpecName "test-operator-ephemeral-workdir". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:09:43 crc kubenswrapper[4982]: I1205 20:09:43.135279 4982 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/d3c6fe00-7794-450c-a588-bd1e2afdbe8c-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.896205 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897030 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897041 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897049 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897056 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897065 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897071 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897091 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897097 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897109 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" containerName="tempest-tests-tempest-tests-runner" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897116 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" containerName="tempest-tests-tempest-tests-runner" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897124 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897130 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897142 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897173 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="extract-utilities" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897183 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" 
containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897189 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897206 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897212 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="extract-content" Dec 05 20:09:49 crc kubenswrapper[4982]: E1205 20:09:49.897229 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897235 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897437 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb6065cb-11b3-46dc-a1af-7116d6a385e8" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897449 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c1e3e6a-59d1-4c2c-95f0-ad5554dc0b14" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897456 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c6fe00-7794-450c-a588-bd1e2afdbe8c" containerName="tempest-tests-tempest-tests-runner" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.897466 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="be9e7a8c-d8e1-46aa-b46a-55775db5b335" containerName="registry-server" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.898140 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.900453 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-lld7m" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.909102 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.976336 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lbtr\" (UniqueName: \"kubernetes.io/projected/f60aebec-8123-4265-b873-fba9eb0f911b-kube-api-access-9lbtr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:49 crc kubenswrapper[4982]: I1205 20:09:49.976380 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.078029 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lbtr\" (UniqueName: \"kubernetes.io/projected/f60aebec-8123-4265-b873-fba9eb0f911b-kube-api-access-9lbtr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.078073 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.078630 4982 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.098104 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lbtr\" (UniqueName: \"kubernetes.io/projected/f60aebec-8123-4265-b873-fba9eb0f911b-kube-api-access-9lbtr\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.104196 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"f60aebec-8123-4265-b873-fba9eb0f911b\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc 
kubenswrapper[4982]: I1205 20:09:50.232353 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.720709 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.743371 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 20:09:50 crc kubenswrapper[4982]: I1205 20:09:50.950522 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f60aebec-8123-4265-b873-fba9eb0f911b","Type":"ContainerStarted","Data":"23254b320ddb388d3a64af812be11273c1b9c46cef02726294ff767e710ba0c1"} Dec 05 20:09:51 crc kubenswrapper[4982]: I1205 20:09:51.961841 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"f60aebec-8123-4265-b873-fba9eb0f911b","Type":"ContainerStarted","Data":"0e345cf615fac9daa268dcc79496faba1c0ac54965fd7bb4f1592cb06556d364"} Dec 05 20:09:51 crc kubenswrapper[4982]: I1205 20:09:51.983322 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.084243147 podStartE2EDuration="2.983303395s" podCreationTimestamp="2025-12-05 20:09:49 +0000 UTC" firstStartedPulling="2025-12-05 20:09:50.720515926 +0000 UTC m=+3369.602401921" lastFinishedPulling="2025-12-05 20:09:51.619576174 +0000 UTC m=+3370.501462169" observedRunningTime="2025-12-05 20:09:51.973042269 +0000 UTC m=+3370.854928274" watchObservedRunningTime="2025-12-05 20:09:51.983303395 +0000 UTC m=+3370.865189390" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.076818 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"] Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.081613 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.092977 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"] Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.133211 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.133443 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.133562 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n2kd\" (UniqueName: \"kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.235865 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.235922 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.235977 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n2kd\" (UniqueName: \"kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.237045 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.237233 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.255488 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-2n2kd\" (UniqueName: \"kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd\") pod \"redhat-operators-b8dln\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") " pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:57 crc kubenswrapper[4982]: I1205 20:09:57.419238 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:09:58 crc kubenswrapper[4982]: I1205 20:09:58.072712 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"] Dec 05 20:09:59 crc kubenswrapper[4982]: I1205 20:09:59.031680 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerID="8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6" exitCode=0 Dec 05 20:09:59 crc kubenswrapper[4982]: I1205 20:09:59.031723 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerDied","Data":"8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6"} Dec 05 20:09:59 crc kubenswrapper[4982]: I1205 20:09:59.031976 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerStarted","Data":"480a2391435bc35992a8b0165c3f3424f03e27ff94e36fe43d63d0ee579f0f00"} Dec 05 20:10:00 crc kubenswrapper[4982]: I1205 20:10:00.044280 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerStarted","Data":"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"} Dec 05 20:10:03 crc kubenswrapper[4982]: I1205 20:10:03.103970 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerID="e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35" exitCode=0 Dec 05 20:10:03 crc kubenswrapper[4982]: I1205 20:10:03.104080 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerDied","Data":"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"} Dec 05 20:10:04 crc kubenswrapper[4982]: I1205 20:10:04.114298 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerStarted","Data":"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"} Dec 05 20:10:04 crc kubenswrapper[4982]: I1205 20:10:04.152256 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-b8dln" podStartSLOduration=2.683098971 podStartE2EDuration="7.152232451s" podCreationTimestamp="2025-12-05 20:09:57 +0000 UTC" firstStartedPulling="2025-12-05 20:09:59.03374589 +0000 UTC m=+3377.915631885" lastFinishedPulling="2025-12-05 20:10:03.50287937 +0000 UTC m=+3382.384765365" observedRunningTime="2025-12-05 20:10:04.134550157 +0000 UTC m=+3383.016436182" watchObservedRunningTime="2025-12-05 20:10:04.152232451 +0000 UTC m=+3383.034118456" Dec 05 20:10:07 crc kubenswrapper[4982]: I1205 20:10:07.419662 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 
20:10:07 crc kubenswrapper[4982]: I1205 20:10:07.420371 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-b8dln" Dec 05 20:10:08 crc kubenswrapper[4982]: I1205 20:10:08.485564 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-b8dln" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="registry-server" probeResult="failure" output=< Dec 05 20:10:08 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 20:10:08 crc kubenswrapper[4982]: > Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.330656 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9pdw7/must-gather-r498v"] Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.334591 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.337135 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9pdw7"/"kube-root-ca.crt" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.337404 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-9pdw7"/"openshift-service-ca.crt" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.357142 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9pdw7/must-gather-r498v"] Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.529834 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sdps\" (UniqueName: \"kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.529975 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.631667 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.631871 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sdps\" (UniqueName: \"kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.632215 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 
20:10:16.655835 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sdps\" (UniqueName: \"kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps\") pod \"must-gather-r498v\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " pod="openshift-must-gather-9pdw7/must-gather-r498v"
Dec 05 20:10:16 crc kubenswrapper[4982]: I1205 20:10:16.671345 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/must-gather-r498v"
Dec 05 20:10:17 crc kubenswrapper[4982]: I1205 20:10:17.241374 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-9pdw7/must-gather-r498v"]
Dec 05 20:10:17 crc kubenswrapper[4982]: I1205 20:10:17.483637 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-b8dln"
Dec 05 20:10:17 crc kubenswrapper[4982]: I1205 20:10:17.542085 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-b8dln"
Dec 05 20:10:18 crc kubenswrapper[4982]: I1205 20:10:18.256114 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/must-gather-r498v" event={"ID":"2a3151fb-529a-4450-9fbf-73b1afccb744","Type":"ContainerStarted","Data":"4b761e56f526991a2e0ff45228f1040e95b5a99c79cd3547e75414c633910127"}
Dec 05 20:10:18 crc kubenswrapper[4982]: I1205 20:10:18.258452 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"]
Dec 05 20:10:19 crc kubenswrapper[4982]: I1205 20:10:19.264141 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-b8dln" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="registry-server" containerID="cri-o://00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c" gracePeriod=2
Dec 05 20:10:19 crc kubenswrapper[4982]: I1205 20:10:19.945254 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b8dln"
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.118438 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2n2kd\" (UniqueName: \"kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd\") pod \"9c4e11bb-1c49-461b-88d4-d5071308f7de\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") "
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.118521 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities\") pod \"9c4e11bb-1c49-461b-88d4-d5071308f7de\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") "
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.118793 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content\") pod \"9c4e11bb-1c49-461b-88d4-d5071308f7de\" (UID: \"9c4e11bb-1c49-461b-88d4-d5071308f7de\") "
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.119648 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities" (OuterVolumeSpecName: "utilities") pod "9c4e11bb-1c49-461b-88d4-d5071308f7de" (UID: "9c4e11bb-1c49-461b-88d4-d5071308f7de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.126319 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.141407 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd" (OuterVolumeSpecName: "kube-api-access-2n2kd") pod "9c4e11bb-1c49-461b-88d4-d5071308f7de" (UID: "9c4e11bb-1c49-461b-88d4-d5071308f7de"). InnerVolumeSpecName "kube-api-access-2n2kd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.227926 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2n2kd\" (UniqueName: \"kubernetes.io/projected/9c4e11bb-1c49-461b-88d4-d5071308f7de-kube-api-access-2n2kd\") on node \"crc\" DevicePath \"\""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.232038 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c4e11bb-1c49-461b-88d4-d5071308f7de" (UID: "9c4e11bb-1c49-461b-88d4-d5071308f7de"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.277532 4982 generic.go:334] "Generic (PLEG): container finished" podID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerID="00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c" exitCode=0
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.277585 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerDied","Data":"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"}
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.277633 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-b8dln" event={"ID":"9c4e11bb-1c49-461b-88d4-d5071308f7de","Type":"ContainerDied","Data":"480a2391435bc35992a8b0165c3f3424f03e27ff94e36fe43d63d0ee579f0f00"}
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.277651 4982 scope.go:117] "RemoveContainer" containerID="00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.277707 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-b8dln"
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.322287 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"]
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.330178 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c4e11bb-1c49-461b-88d4-d5071308f7de-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 20:10:20 crc kubenswrapper[4982]: I1205 20:10:20.331188 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-b8dln"]
Dec 05 20:10:21 crc kubenswrapper[4982]: I1205 20:10:21.403267 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" path="/var/lib/kubelet/pods/9c4e11bb-1c49-461b-88d4-d5071308f7de/volumes"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.030241 4982 scope.go:117] "RemoveContainer" containerID="e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.080128 4982 scope.go:117] "RemoveContainer" containerID="8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.134118 4982 scope.go:117] "RemoveContainer" containerID="00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"
Dec 05 20:10:22 crc kubenswrapper[4982]: E1205 20:10:22.134633 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c\": container with ID starting with 00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c not found: ID does not exist" containerID="00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.134661 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c"} err="failed to get container status \"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c\": rpc error: code = NotFound desc = could not find container \"00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c\": container with ID starting with 00c5398a486dc9b0ceb75faeb79a5b7b6121db0bac31a27deb12ccf7f44cab8c not found: ID does not exist"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.134682 4982 scope.go:117] "RemoveContainer" containerID="e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"
Dec 05 20:10:22 crc kubenswrapper[4982]: E1205 20:10:22.135129 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35\": container with ID starting with e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35 not found: ID does not exist" containerID="e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.135202 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35"} err="failed to get container status \"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35\": rpc error: code = NotFound desc = could not find container \"e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35\": container with ID starting with e971678ae39286fd3c71f7213a63e39917252da39c77fcd237b5b926479f5e35 not found: ID does not exist"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.135242 4982 scope.go:117] "RemoveContainer" containerID="8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6"
Dec 05 20:10:22 crc kubenswrapper[4982]: E1205 20:10:22.135637 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6\": container with ID starting with 8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6 not found: ID does not exist" containerID="8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6"
Dec 05 20:10:22 crc kubenswrapper[4982]: I1205 20:10:22.135662 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6"} err="failed to get container status \"8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6\": rpc error: code = NotFound desc = could not find container \"8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6\": container with ID starting with 8d9dc76f5b065043fdd76bf08aefa975f18d17f316d6540545b1a883e7da90a6 not found: ID does not exist"
Dec 05 20:10:23 crc kubenswrapper[4982]: I1205 20:10:23.314473 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/must-gather-r498v" event={"ID":"2a3151fb-529a-4450-9fbf-73b1afccb744","Type":"ContainerStarted","Data":"7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01"}
Dec 05 20:10:23 crc kubenswrapper[4982]: I1205 20:10:23.315031 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/must-gather-r498v" event={"ID":"2a3151fb-529a-4450-9fbf-73b1afccb744","Type":"ContainerStarted","Data":"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3"}
Dec 05 20:10:23 crc kubenswrapper[4982]: I1205 20:10:23.333830 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9pdw7/must-gather-r498v" podStartSLOduration=2.492516289 podStartE2EDuration="7.333809784s" podCreationTimestamp="2025-12-05 20:10:16 +0000 UTC" firstStartedPulling="2025-12-05 20:10:17.250707594 +0000 UTC m=+3396.132593589" lastFinishedPulling="2025-12-05 20:10:22.092001089 +0000 UTC m=+3400.973887084" observedRunningTime="2025-12-05 20:10:23.332310618 +0000 UTC m=+3402.214196623" watchObservedRunningTime="2025-12-05 20:10:23.333809784 +0000 UTC m=+3402.215695789"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.080244 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-xr2m2"]
Dec 05 20:10:26 crc kubenswrapper[4982]: E1205 20:10:26.082176 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="registry-server"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.082476 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="registry-server"
Dec 05 20:10:26 crc kubenswrapper[4982]: E1205 20:10:26.082600 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="extract-content"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.082677 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="extract-content"
Dec 05 20:10:26 crc kubenswrapper[4982]: E1205 20:10:26.082789 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="extract-utilities"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.082870 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="extract-utilities"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.083239 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c4e11bb-1c49-461b-88d4-d5071308f7de" containerName="registry-server"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.084456 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.086908 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9pdw7"/"default-dockercfg-zw27k"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.165736 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.165773 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x75j\" (UniqueName: \"kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.267459 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.267502 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x75j\" (UniqueName: \"kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.267552 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.287367 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x75j\" (UniqueName: \"kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j\") pod \"crc-debug-xr2m2\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") " pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: I1205 20:10:26.405558 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:10:26 crc kubenswrapper[4982]: W1205 20:10:26.455994 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ae19670_cdd2_4438_babd_3adafb3e71e4.slice/crio-3cde8536ed7e4c563210166491dd79194a8afb62c7d81a55df91a658a8823270 WatchSource:0}: Error finding container 3cde8536ed7e4c563210166491dd79194a8afb62c7d81a55df91a658a8823270: Status 404 returned error can't find the container with id 3cde8536ed7e4c563210166491dd79194a8afb62c7d81a55df91a658a8823270
Dec 05 20:10:27 crc kubenswrapper[4982]: I1205 20:10:27.366033 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2" event={"ID":"1ae19670-cdd2-4438-babd-3adafb3e71e4","Type":"ContainerStarted","Data":"3cde8536ed7e4c563210166491dd79194a8afb62c7d81a55df91a658a8823270"}
Dec 05 20:10:38 crc kubenswrapper[4982]: I1205 20:10:38.503981 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2" event={"ID":"1ae19670-cdd2-4438-babd-3adafb3e71e4","Type":"ContainerStarted","Data":"90b30989b859cfcb4002c21113e746942aa8ff8f0dcb10d28a4b123bb5103b24"}
Dec 05 20:10:38 crc kubenswrapper[4982]: I1205 20:10:38.517282 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2" podStartSLOduration=0.757731003 podStartE2EDuration="12.517265562s" podCreationTimestamp="2025-12-05 20:10:26 +0000 UTC" firstStartedPulling="2025-12-05 20:10:26.458885867 +0000 UTC m=+3405.340771862" lastFinishedPulling="2025-12-05 20:10:38.218420416 +0000 UTC m=+3417.100306421" observedRunningTime="2025-12-05 20:10:38.516424362 +0000 UTC m=+3417.398310357" watchObservedRunningTime="2025-12-05 20:10:38.517265562 +0000 UTC m=+3417.399151547"
Dec 05 20:11:12 crc kubenswrapper[4982]: I1205 20:11:12.558846 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:11:12 crc kubenswrapper[4982]: I1205 20:11:12.559448 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:11:23 crc kubenswrapper[4982]: I1205 20:11:23.934681 4982 generic.go:334] "Generic (PLEG): container finished" podID="1ae19670-cdd2-4438-babd-3adafb3e71e4" containerID="90b30989b859cfcb4002c21113e746942aa8ff8f0dcb10d28a4b123bb5103b24" exitCode=0
Dec 05 20:11:23 crc kubenswrapper[4982]: I1205 20:11:23.934770 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2" event={"ID":"1ae19670-cdd2-4438-babd-3adafb3e71e4","Type":"ContainerDied","Data":"90b30989b859cfcb4002c21113e746942aa8ff8f0dcb10d28a4b123bb5103b24"}
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.073436 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.114051 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-xr2m2"]
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.130896 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-xr2m2"]
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.237042 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host\") pod \"1ae19670-cdd2-4438-babd-3adafb3e71e4\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") "
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.237100 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x75j\" (UniqueName: \"kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j\") pod \"1ae19670-cdd2-4438-babd-3adafb3e71e4\" (UID: \"1ae19670-cdd2-4438-babd-3adafb3e71e4\") "
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.237134 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host" (OuterVolumeSpecName: "host") pod "1ae19670-cdd2-4438-babd-3adafb3e71e4" (UID: "1ae19670-cdd2-4438-babd-3adafb3e71e4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.237819 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1ae19670-cdd2-4438-babd-3adafb3e71e4-host\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.243201 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j" (OuterVolumeSpecName: "kube-api-access-6x75j") pod "1ae19670-cdd2-4438-babd-3adafb3e71e4" (UID: "1ae19670-cdd2-4438-babd-3adafb3e71e4"). InnerVolumeSpecName "kube-api-access-6x75j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.339648 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x75j\" (UniqueName: \"kubernetes.io/projected/1ae19670-cdd2-4438-babd-3adafb3e71e4-kube-api-access-6x75j\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.404897 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ae19670-cdd2-4438-babd-3adafb3e71e4" path="/var/lib/kubelet/pods/1ae19670-cdd2-4438-babd-3adafb3e71e4/volumes"
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.959624 4982 scope.go:117] "RemoveContainer" containerID="90b30989b859cfcb4002c21113e746942aa8ff8f0dcb10d28a4b123bb5103b24"
Dec 05 20:11:25 crc kubenswrapper[4982]: I1205 20:11:25.959686 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-xr2m2"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.302727 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-w5vnt"]
Dec 05 20:11:26 crc kubenswrapper[4982]: E1205 20:11:26.303271 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ae19670-cdd2-4438-babd-3adafb3e71e4" containerName="container-00"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.303285 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ae19670-cdd2-4438-babd-3adafb3e71e4" containerName="container-00"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.303507 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ae19670-cdd2-4438-babd-3adafb3e71e4" containerName="container-00"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.304278 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.306768 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9pdw7"/"default-dockercfg-zw27k"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.465534 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.465792 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dqmb\" (UniqueName: \"kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.567593 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.567987 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7dqmb\" (UniqueName: \"kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.568182 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.589084 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dqmb\" (UniqueName: \"kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb\") pod \"crc-debug-w5vnt\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") " pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.622673 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.970372 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt" event={"ID":"4c7de5f1-4c71-404b-ab79-8971b8d64c91","Type":"ContainerStarted","Data":"38af96101184245d565a71bab4a38a10a6b1260a949e5e30a3fd7a8228d8b501"}
Dec 05 20:11:26 crc kubenswrapper[4982]: I1205 20:11:26.970654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt" event={"ID":"4c7de5f1-4c71-404b-ab79-8971b8d64c91","Type":"ContainerStarted","Data":"4522db155c4492a48b7295153a141b73fc7986f16b2b86fa8b0a96da9e191a2d"}
Dec 05 20:11:27 crc kubenswrapper[4982]: I1205 20:11:27.000796 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt" podStartSLOduration=1.000777336 podStartE2EDuration="1.000777336s" podCreationTimestamp="2025-12-05 20:11:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:11:26.98311683 +0000 UTC m=+3465.865002825" watchObservedRunningTime="2025-12-05 20:11:27.000777336 +0000 UTC m=+3465.882663331"
Dec 05 20:11:28 crc kubenswrapper[4982]: I1205 20:11:28.016697 4982 generic.go:334] "Generic (PLEG): container finished" podID="4c7de5f1-4c71-404b-ab79-8971b8d64c91" containerID="38af96101184245d565a71bab4a38a10a6b1260a949e5e30a3fd7a8228d8b501" exitCode=0
Dec 05 20:11:28 crc kubenswrapper[4982]: I1205 20:11:28.017031 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt" event={"ID":"4c7de5f1-4c71-404b-ab79-8971b8d64c91","Type":"ContainerDied","Data":"38af96101184245d565a71bab4a38a10a6b1260a949e5e30a3fd7a8228d8b501"}
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.155588 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.238661 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-w5vnt"]
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.256364 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-w5vnt"]
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.333576 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7dqmb\" (UniqueName: \"kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb\") pod \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") "
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.333717 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host\") pod \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\" (UID: \"4c7de5f1-4c71-404b-ab79-8971b8d64c91\") "
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.333837 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host" (OuterVolumeSpecName: "host") pod "4c7de5f1-4c71-404b-ab79-8971b8d64c91" (UID: "4c7de5f1-4c71-404b-ab79-8971b8d64c91"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.334071 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/4c7de5f1-4c71-404b-ab79-8971b8d64c91-host\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.346846 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb" (OuterVolumeSpecName: "kube-api-access-7dqmb") pod "4c7de5f1-4c71-404b-ab79-8971b8d64c91" (UID: "4c7de5f1-4c71-404b-ab79-8971b8d64c91"). InnerVolumeSpecName "kube-api-access-7dqmb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.400467 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c7de5f1-4c71-404b-ab79-8971b8d64c91" path="/var/lib/kubelet/pods/4c7de5f1-4c71-404b-ab79-8971b8d64c91/volumes"
Dec 05 20:11:29 crc kubenswrapper[4982]: I1205 20:11:29.437031 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7dqmb\" (UniqueName: \"kubernetes.io/projected/4c7de5f1-4c71-404b-ab79-8971b8d64c91-kube-api-access-7dqmb\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.040474 4982 scope.go:117] "RemoveContainer" containerID="38af96101184245d565a71bab4a38a10a6b1260a949e5e30a3fd7a8228d8b501"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.040736 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-w5vnt"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.457231 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-lzlbk"]
Dec 05 20:11:30 crc kubenswrapper[4982]: E1205 20:11:30.457972 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c7de5f1-4c71-404b-ab79-8971b8d64c91" containerName="container-00"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.457987 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c7de5f1-4c71-404b-ab79-8971b8d64c91" containerName="container-00"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.458257 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c7de5f1-4c71-404b-ab79-8971b8d64c91" containerName="container-00"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.460018 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.462343 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-9pdw7"/"default-dockercfg-zw27k"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.557947 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.558010 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-559jx\" (UniqueName: \"kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.659984 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.660035 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-559jx\" (UniqueName: \"kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.660117 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.677370 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-559jx\" (UniqueName: \"kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx\") pod \"crc-debug-lzlbk\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") " pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: I1205 20:11:30.778948 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:30 crc kubenswrapper[4982]: W1205 20:11:30.808647 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1542f7d8_66d3_45e5_b42f_54438b594db1.slice/crio-65ef7a38681a5c03a392af0998ea236c4e19dedeba7b42f4ae7b98e14855c4ab WatchSource:0}: Error finding container 65ef7a38681a5c03a392af0998ea236c4e19dedeba7b42f4ae7b98e14855c4ab: Status 404 returned error can't find the container with id 65ef7a38681a5c03a392af0998ea236c4e19dedeba7b42f4ae7b98e14855c4ab
Dec 05 20:11:31 crc kubenswrapper[4982]: I1205 20:11:31.055803 4982 generic.go:334] "Generic (PLEG): container finished" podID="1542f7d8-66d3-45e5-b42f-54438b594db1" containerID="f53ddf9245c3f109eeadc8a9b76d4f34d70b106cc32f3aa99ddb36c748daabf3" exitCode=0
Dec 05 20:11:31 crc kubenswrapper[4982]: I1205 20:11:31.055854 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk" event={"ID":"1542f7d8-66d3-45e5-b42f-54438b594db1","Type":"ContainerDied","Data":"f53ddf9245c3f109eeadc8a9b76d4f34d70b106cc32f3aa99ddb36c748daabf3"}
Dec 05 20:11:31 crc kubenswrapper[4982]: I1205 20:11:31.056115 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk" event={"ID":"1542f7d8-66d3-45e5-b42f-54438b594db1","Type":"ContainerStarted","Data":"65ef7a38681a5c03a392af0998ea236c4e19dedeba7b42f4ae7b98e14855c4ab"}
Dec 05 20:11:31 crc kubenswrapper[4982]: I1205 20:11:31.096522 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-lzlbk"]
Dec 05 20:11:31 crc kubenswrapper[4982]: I1205 20:11:31.109012 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9pdw7/crc-debug-lzlbk"]
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.189030 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.192372 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host\") pod \"1542f7d8-66d3-45e5-b42f-54438b594db1\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") "
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.192559 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host" (OuterVolumeSpecName: "host") pod "1542f7d8-66d3-45e5-b42f-54438b594db1" (UID: "1542f7d8-66d3-45e5-b42f-54438b594db1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.192580 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-559jx\" (UniqueName: \"kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx\") pod \"1542f7d8-66d3-45e5-b42f-54438b594db1\" (UID: \"1542f7d8-66d3-45e5-b42f-54438b594db1\") "
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.193615 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/1542f7d8-66d3-45e5-b42f-54438b594db1-host\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.200218 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx" (OuterVolumeSpecName: "kube-api-access-559jx") pod "1542f7d8-66d3-45e5-b42f-54438b594db1" (UID: "1542f7d8-66d3-45e5-b42f-54438b594db1"). InnerVolumeSpecName "kube-api-access-559jx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:11:32 crc kubenswrapper[4982]: I1205 20:11:32.295610 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-559jx\" (UniqueName: \"kubernetes.io/projected/1542f7d8-66d3-45e5-b42f-54438b594db1-kube-api-access-559jx\") on node \"crc\" DevicePath \"\""
Dec 05 20:11:33 crc kubenswrapper[4982]: I1205 20:11:33.086947 4982 scope.go:117] "RemoveContainer" containerID="f53ddf9245c3f109eeadc8a9b76d4f34d70b106cc32f3aa99ddb36c748daabf3"
Dec 05 20:11:33 crc kubenswrapper[4982]: I1205 20:11:33.087331 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/crc-debug-lzlbk"
Dec 05 20:11:33 crc kubenswrapper[4982]: I1205 20:11:33.423013 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1542f7d8-66d3-45e5-b42f-54438b594db1" path="/var/lib/kubelet/pods/1542f7d8-66d3-45e5-b42f-54438b594db1/volumes"
Dec 05 20:11:42 crc kubenswrapper[4982]: I1205 20:11:42.557493 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:11:42 crc kubenswrapper[4982]: I1205 20:11:42.558432 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.276850 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/init-config-reloader/0.log"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.433580 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/init-config-reloader/0.log"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.477629 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/config-reloader/0.log"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.498845 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/alertmanager/0.log"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.634570 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6fbc9dfdf4-kq8zv_cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb/barbican-api/0.log"
Dec 05 20:11:56 crc kubenswrapper[4982]: I1205 20:11:56.672756 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6fbc9dfdf4-kq8zv_cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb/barbican-api-log/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.100044 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d7dcb9f9d-rc2ld_f129356d-d3c2-4fc0-856e-2310b4c29996/barbican-keystone-listener-log/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.134544 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-d99845895-c6s44_3eb20c0f-b133-4aab-a43a-22dab1ae0630/barbican-worker/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.136921 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d7dcb9f9d-rc2ld_f129356d-d3c2-4fc0-856e-2310b4c29996/barbican-keystone-listener/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.314495 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-d99845895-c6s44_3eb20c0f-b133-4aab-a43a-22dab1ae0630/barbican-worker-log/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.348497 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt_3cd634f3-b987-404a-a10b-609341e2b548/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.607804 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/ceilometer-notification-agent/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.617962 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/ceilometer-central-agent/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.637681 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/proxy-httpd/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.683186 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/sg-core/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.859258 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_53a46b29-95f9-43a2-8d2a-770693317314/cinder-api/0.log"
Dec 05 20:11:57 crc kubenswrapper[4982]: I1205 20:11:57.877796 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_53a46b29-95f9-43a2-8d2a-770693317314/cinder-api-log/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.034610 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_90a28c6e-ad50-4f47-900b-f35bc06060a3/cinder-scheduler/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.184298 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_90a28c6e-ad50-4f47-900b-f35bc06060a3/probe/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.309622 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_a30af9ef-11ee-4919-8ca8-2ba7d588264b/cloudkitty-api/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.363862 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_a30af9ef-11ee-4919-8ca8-2ba7d588264b/cloudkitty-api-log/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.424322 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_892ec5b5-4495-4ef9-ae57-7e3c535e11ca/loki-compactor/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.585189 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-xvg29_97f36210-9f01-4ba6-95e4-0aea23aefbb3/loki-distributor/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.679744 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-jgdkk_25283efb-caa6-418a-8228-f3dcf1802be2/gateway/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.765396 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-xcgh9_b670abee-8f29-4979-8c53-5226b58a0141/gateway/0.log"
Dec 05 20:11:58 crc kubenswrapper[4982]: I1205 20:11:58.882067 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_8c96cc97-f375-489f-9168-bac3695b309a/loki-index-gateway/0.log"
Dec 05 20:11:59 crc kubenswrapper[4982]: I1205 20:11:59.371135 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_85a8d068-da43-4ed2-879a-281872eab097/loki-ingester/0.log"
Dec 05 20:11:59 crc kubenswrapper[4982]: I1205 20:11:59.647561 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z_4e468d19-dc16-452b-b3c8-cd5df67c4748/loki-query-frontend/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.019087 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4_27e277b5-5b4e-4d77-afbd-1b7c2d53918e/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.200022 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-zrl2q_39dc27e3-5788-40fd-b186-9c91aa5618eb/loki-querier/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.262759 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl_9817cc48-c666-468c-a9cf-327fa1898ad9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.481424 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/init/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.693599 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/init/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.845372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/dnsmasq-dns/0.log"
Dec 05 20:12:00 crc kubenswrapper[4982]: I1205 20:12:00.867827 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq_2d9ae0b1-8af5-4522-af29-d67b2c829ca0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.046102 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_de9f5462-cedf-4860-9b59-bac07091738f/glance-log/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.091086 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_de9f5462-cedf-4860-9b59-bac07091738f/glance-httpd/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.208801 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_36af0f53-11a4-47a8-9361-acde52280271/glance-httpd/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.285792 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_36af0f53-11a4-47a8-9361-acde52280271/glance-log/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.386897 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q_ddbe0e8f-d183-4f24-a7cf-221b3221cb27/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.579503 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-vlqtj_f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.838574 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29416081-4j7pq_76cb36b1-d2a2-4742-8886-fc3305fb4082/keystone-cron/0.log"
Dec 05 20:12:01 crc kubenswrapper[4982]: I1205 20:12:01.950783 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-768c967fc5-mm4fv_87f55498-f78b-4201-9970-b393206ddabf/keystone-api/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.057280 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_babd6ca2-04d1-4f51-aaa6-d621a339d799/kube-state-metrics/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.161883 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d/cloudkitty-proc/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.204364 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-lbxct_8f1fe72c-1893-4aa4-9fc9-5ab862de7c35/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.481910 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-67fdf46f87-pnjnc_4afaca8a-acd1-480f-a132-33155fb3b1b0/neutron-api/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.575403 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-67fdf46f87-pnjnc_4afaca8a-acd1-480f-a132-33155fb3b1b0/neutron-httpd/0.log"
Dec 05 20:12:02 crc kubenswrapper[4982]: I1205 20:12:02.647060 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww_6ff78dfa-4b95-4e32-b569-08d967824332/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.346118 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c7fa7720-ccc7-4ddf-8102-e10818187b20/nova-cell0-conductor-conductor/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.390949 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_eaf71bf8-6e74-4fec-a151-372e1044b69f/nova-api-log/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.485094 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_eaf71bf8-6e74-4fec-a151-372e1044b69f/nova-api-api/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.684000 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_b1112e38-5f73-4a31-8f6d-9b03a9148c02/nova-cell1-conductor-conductor/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.716449 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_592eec2a-b340-4c42-8b06-ab477b4aecfa/nova-cell1-novncproxy-novncproxy/0.log"
Dec 05 20:12:03 crc kubenswrapper[4982]: I1205 20:12:03.955045 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-qxxl7_592205cb-46f4-4bc6-9329-a90e5e63400e/nova-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.078726 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a196cf68-e14d-4171-90f5-a266a8313f72/nova-metadata-log/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.424792 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c2cfe605-218c-442d-b88b-8f2d7b3a6ba0/nova-scheduler-scheduler/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.453244 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/mysql-bootstrap/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.642395 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/mysql-bootstrap/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.774027 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/galera/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.930069 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/mysql-bootstrap/0.log"
Dec 05 20:12:04 crc kubenswrapper[4982]: I1205 20:12:04.969653 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a196cf68-e14d-4171-90f5-a266a8313f72/nova-metadata-metadata/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.099556 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/galera/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.122490 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/mysql-bootstrap/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.245677 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2/openstackclient/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.433668 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jhsjf_c4d66436-88ae-4023-9601-bd2aa6954667/ovn-controller/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.486714 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-df27x_c19a072d-5061-4c2e-a817-8708ec746095/openstack-network-exporter/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.651133 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server-init/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.817210 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovs-vswitchd/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.835282 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server/0.log"
Dec 05 20:12:05 crc kubenswrapper[4982]: I1205 20:12:05.855786 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server-init/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.026409 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vgqgj_19ae7f76-4c93-41fe-9ac4-aead0ad360fe/ovn-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.044828 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e6b8e1da-6aa2-4556-a427-35c1f9920482/openstack-network-exporter/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.154949 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e6b8e1da-6aa2-4556-a427-35c1f9920482/ovn-northd/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.263403 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7d0f7663-0afb-41dc-bae8-7efdafbf2ed2/openstack-network-exporter/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.555886 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7d0f7663-0afb-41dc-bae8-7efdafbf2ed2/ovsdbserver-nb/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.713792 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0/ovsdbserver-sb/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.719050 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0/openstack-network-exporter/0.log"
Dec 05 20:12:06 crc kubenswrapper[4982]: I1205 20:12:06.953640 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59b8477896-ww7nl_c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235/placement-api/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.046680 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59b8477896-ww7nl_c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235/placement-log/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.086351 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/init-config-reloader/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.243686 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/thanos-sidecar/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.270577 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/config-reloader/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.301093 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/init-config-reloader/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.321273 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/prometheus/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.545615 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/setup-container/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.785126 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/setup-container/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.814113 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/rabbitmq/0.log"
Dec 05 20:12:07 crc kubenswrapper[4982]: I1205 20:12:07.894889 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/setup-container/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.071170 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/rabbitmq/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.107647 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/setup-container/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.165033 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cf864_c22ac692-8285-4e89-8c2b-28b2bc125fa3/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.278344 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lk6dp_5eedd2f2-bb50-4da7-846d-000d03e17934/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.430524 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg_1458b955-6299-4a91-a904-4146c620e208/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.636783 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-x59ld_96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.772058 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-5zqdl_1f38261e-9d6a-4983-873c-7e7cf37ebb81/ssh-known-hosts-edpm-deployment/0.log"
Dec 05 20:12:08 crc kubenswrapper[4982]: I1205 20:12:08.997006 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-fc787d555-k2pps_4ae7179d-f311-4080-9409-b5315377edea/proxy-httpd/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.023280 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-fc787d555-k2pps_4ae7179d-f311-4080-9409-b5315377edea/proxy-server/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.134419 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hrhl7_155eecea-ebae-400d-a81e-1d28392b290e/swift-ring-rebalance/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.274296 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-auditor/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.286869 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-reaper/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.351030 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-replicator/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.489328 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-server/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.508298 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-auditor/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.530435 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-replicator/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.539234 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-server/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.724427 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-updater/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.767753 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-expirer/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.770100 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-auditor/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.825982 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-replicator/0.log"
Dec 05 20:12:09 crc kubenswrapper[4982]: I1205 20:12:09.957910 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-server/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.001971 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/rsync/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.060064 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/swift-recon-cron/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.069237 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-updater/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.254818 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc_f9f55ee0-0c0c-4edf-9b1f-17ff56560708/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.297284 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d3c6fe00-7794-450c-a588-bd1e2afdbe8c/tempest-tests-tempest-tests-runner/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.462096 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f60aebec-8123-4265-b873-fba9eb0f911b/test-operator-logs-container/0.log"
Dec 05 20:12:10 crc kubenswrapper[4982]: I1205 20:12:10.620537 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-b56qn_d26e25b0-3708-4c24-9034-36a8ab878465/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 20:12:12 crc kubenswrapper[4982]: I1205 20:12:12.556520 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:12:12 crc kubenswrapper[4982]: I1205 20:12:12.556575 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:12:12 crc kubenswrapper[4982]: I1205 20:12:12.556616 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 20:12:12 crc kubenswrapper[4982]: I1205 20:12:12.557479 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 20:12:12 crc kubenswrapper[4982]: I1205 20:12:12.557527 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf" gracePeriod=600
Dec 05 20:12:13 crc kubenswrapper[4982]: I1205 20:12:13.551954 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf" exitCode=0
Dec 05 20:12:13 crc kubenswrapper[4982]: I1205 20:12:13.551999 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf"}
Dec 05 20:12:13 crc kubenswrapper[4982]: I1205 20:12:13.552315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"}
Dec 05 20:12:13 crc kubenswrapper[4982]: I1205 20:12:13.552337 4982 scope.go:117] "RemoveContainer" containerID="607ad95b1d4467f61306dab6160775beb9b9c95a8211e7e50334853060745fc3"
Dec 05 20:12:14 crc kubenswrapper[4982]: I1205 20:12:14.030619 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_34fad3bb-6720-4219-8862-08492842062a/memcached/0.log"
Dec 05 20:12:35 crc kubenswrapper[4982]: I1205 20:12:35.762099 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log"
Dec 05 20:12:35 crc kubenswrapper[4982]: I1205 20:12:35.942906 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log"
Dec 05 20:12:35 crc kubenswrapper[4982]: I1205 20:12:35.946633 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log"
Dec 05 20:12:35 crc kubenswrapper[4982]: I1205 20:12:35.957265 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log"
Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.156771 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log"
Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.170651 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log"
Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.185677 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/extract/0.log"
Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.334348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f8852_ebffce09-7b77-4c54-9c5a-520517cc3aa8/kube-rbac-proxy/0.log"
Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.408676 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f8852_ebffce09-7b77-4c54-9c5a-520517cc3aa8/manager/0.log"
Dec 05 20:12:36 crc
kubenswrapper[4982]: I1205 20:12:36.450498 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wrfxk_b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a/kube-rbac-proxy/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.587495 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wrfxk_b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a/manager/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.601683 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-t2zqn_55a262ed-8b7c-4e28-af82-89c5df1f675b/kube-rbac-proxy/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.665111 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-t2zqn_55a262ed-8b7c-4e28-af82-89c5df1f675b/manager/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.777131 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-p6gnj_517bb39b-5710-45f0-b70a-694dc5b4d044/kube-rbac-proxy/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.918909 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-p6gnj_517bb39b-5710-45f0-b70a-694dc5b4d044/manager/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.977616 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rvzdw_511305c1-7bff-43ce-b398-e5aec02fa9ec/manager/0.log" Dec 05 20:12:36 crc kubenswrapper[4982]: I1205 20:12:36.982554 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rvzdw_511305c1-7bff-43ce-b398-e5aec02fa9ec/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.136372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrjv5_25671f72-e601-41d8-9617-fb9c436e7959/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.183408 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrjv5_25671f72-e601-41d8-9617-fb9c436e7959/manager/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.373334 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fnkqx_ab5cdcbf-c82f-48be-a97d-65a856e95bd9/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.427562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-ms8zf_5cd3d875-b57b-4b61-ac66-17035d351f35/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.529228 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fnkqx_ab5cdcbf-c82f-48be-a97d-65a856e95bd9/manager/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.605770 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-ms8zf_5cd3d875-b57b-4b61-ac66-17035d351f35/manager/0.log" Dec 
05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.610387 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-w8qcr_701bf927-7bac-49a4-9435-a68ebd3ff8c4/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.768599 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-w8qcr_701bf927-7bac-49a4-9435-a68ebd3ff8c4/manager/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.799230 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-ctg6p_d30f627a-9e43-4435-aaf3-31a0631bfcba/kube-rbac-proxy/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.834023 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-ctg6p_d30f627a-9e43-4435-aaf3-31a0631bfcba/manager/0.log" Dec 05 20:12:37 crc kubenswrapper[4982]: I1205 20:12:37.965526 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-472kw_d4adcb8f-8951-4c59-adf3-e94b1a5e202b/kube-rbac-proxy/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.031457 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-472kw_d4adcb8f-8951-4c59-adf3-e94b1a5e202b/manager/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.197711 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7rqsz_b4d06e27-b91f-4602-b327-3435d8977280/kube-rbac-proxy/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.204653 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7rqsz_b4d06e27-b91f-4602-b327-3435d8977280/manager/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.246321 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7hbtv_2e0bfce4-bfd5-49ae-858f-647f5f8a919e/kube-rbac-proxy/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.454121 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7hbtv_2e0bfce4-bfd5-49ae-858f-647f5f8a919e/manager/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.478698 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-d6svn_983b81df-c036-4f75-8d49-259f09235991/kube-rbac-proxy/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.595367 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-d6svn_983b81df-c036-4f75-8d49-259f09235991/manager/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.650644 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl_c286a8e0-15ff-4705-a03f-bca226144360/kube-rbac-proxy/0.log" Dec 05 20:12:38 crc kubenswrapper[4982]: I1205 20:12:38.679911 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl_c286a8e0-15ff-4705-a03f-bca226144360/manager/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.137217 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2f2gp_c5e545c3-45c4-4c76-a6cf-e45385919b9d/registry-server/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.137918 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-79fbdbdfdc-dt7k6_5619a079-821c-49cb-84f5-136f41ff45a5/operator/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.403328 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xww76_a964acf3-fa80-4561-86da-c831a10fc58e/kube-rbac-proxy/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.520658 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xww76_a964acf3-fa80-4561-86da-c831a10fc58e/manager/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.652740 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kzpx6_c959458c-8a9a-4d37-889a-577a673e5305/kube-rbac-proxy/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.740161 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kzpx6_c959458c-8a9a-4d37-889a-577a673e5305/manager/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.929999 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-wst8r_1f84d21b-6ce8-4c97-a104-cb308ce8527d/operator/0.log" Dec 05 20:12:39 crc kubenswrapper[4982]: I1205 20:12:39.947415 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8ttpr_167e755e-d998-47ca-88dd-0bc17c975864/kube-rbac-proxy/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.078022 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8ttpr_167e755e-d998-47ca-88dd-0bc17c975864/manager/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.133865 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54bf4fb767-47tg5_eb889ad3-88cd-45b4-9b56-13d3181ba3e6/manager/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.173722 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6b4849bfff-skwf7_7b1c4531-0231-42d4-94e9-0a211394dfa6/kube-rbac-proxy/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.459756 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6b4849bfff-skwf7_7b1c4531-0231-42d4-94e9-0a211394dfa6/manager/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.491919 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d48dh_26fa2fe6-70bb-4a70-8bee-b0cde872beb1/kube-rbac-proxy/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.519916 4982 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d48dh_26fa2fe6-70bb-4a70-8bee-b0cde872beb1/manager/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.708253 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-829rq_cf51df5f-9adb-4929-9a00-6bfeafdfa069/kube-rbac-proxy/0.log" Dec 05 20:12:40 crc kubenswrapper[4982]: I1205 20:12:40.723704 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-829rq_cf51df5f-9adb-4929-9a00-6bfeafdfa069/manager/0.log" Dec 05 20:13:01 crc kubenswrapper[4982]: I1205 20:13:01.086367 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-xctm7_72f8774d-c2a9-4489-9812-2b72525fe9d9/control-plane-machine-set-operator/0.log" Dec 05 20:13:01 crc kubenswrapper[4982]: I1205 20:13:01.182616 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bh4fj_8d4c7ce2-7724-494b-b86a-23627074ce45/kube-rbac-proxy/0.log" Dec 05 20:13:01 crc kubenswrapper[4982]: I1205 20:13:01.246033 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bh4fj_8d4c7ce2-7724-494b-b86a-23627074ce45/machine-api-operator/0.log" Dec 05 20:13:15 crc kubenswrapper[4982]: I1205 20:13:15.064075 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-skf6w_f1ca36be-fe08-4f98-be99-35f9e8265a79/cert-manager-controller/0.log" Dec 05 20:13:15 crc kubenswrapper[4982]: I1205 20:13:15.190344 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-sxnb8_9825df92-fee9-4a92-b324-62162aacc6fe/cert-manager-cainjector/0.log" Dec 05 20:13:15 crc kubenswrapper[4982]: I1205 20:13:15.272030 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wtdnn_f52e413e-741e-4b30-b1d7-e687e31c16e5/cert-manager-webhook/0.log" Dec 05 20:13:28 crc kubenswrapper[4982]: I1205 20:13:28.633341 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-2zqs2_47097c85-dd76-46fd-b837-c5f9e2f5b599/nmstate-console-plugin/0.log" Dec 05 20:13:28 crc kubenswrapper[4982]: I1205 20:13:28.801640 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-l8vt9_d69403b9-dbd1-4059-afca-e7a907dd2c08/kube-rbac-proxy/0.log" Dec 05 20:13:28 crc kubenswrapper[4982]: I1205 20:13:28.803628 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4dnv7_38b59b38-39e3-4a09-b50e-0cfa9035cd3f/nmstate-handler/0.log" Dec 05 20:13:28 crc kubenswrapper[4982]: I1205 20:13:28.869964 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-l8vt9_d69403b9-dbd1-4059-afca-e7a907dd2c08/nmstate-metrics/0.log" Dec 05 20:13:29 crc kubenswrapper[4982]: I1205 20:13:29.013742 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-s9fkp_0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d/nmstate-operator/0.log" Dec 05 20:13:29 crc kubenswrapper[4982]: I1205 20:13:29.124933 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-lrtmh_085cb474-3881-41ed-b0fa-6a3d237ec343/nmstate-webhook/0.log" Dec 05 20:13:41 crc kubenswrapper[4982]: I1205 20:13:41.400133 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/kube-rbac-proxy/0.log" Dec 05 20:13:41 crc kubenswrapper[4982]: I1205 20:13:41.450052 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/manager/0.log" Dec 05 20:13:54 crc kubenswrapper[4982]: I1205 20:13:54.985587 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-f2lxm_e5934f26-82dc-4376-9073-0d24a57aadb1/kube-rbac-proxy/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.108398 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-f2lxm_e5934f26-82dc-4376-9073-0d24a57aadb1/controller/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.273498 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.477562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.497013 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.512840 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.586316 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.694587 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.722510 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.776645 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.797060 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:13:55 crc kubenswrapper[4982]: I1205 20:13:55.995233 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.020610 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.024494 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/controller/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.025391 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.211609 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/frr-metrics/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.228023 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/kube-rbac-proxy/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.232122 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/kube-rbac-proxy-frr/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.481893 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/reloader/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.516034 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-t9drp_8ad46eb3-de5d-4122-82fe-5cf11faf01bc/frr-k8s-webhook-server/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.721211 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d4d6d9964-7tmb4_4ba9f951-374b-45ba-a5d1-de7393862f1d/manager/0.log" Dec 05 20:13:56 crc kubenswrapper[4982]: I1205 20:13:56.942671 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-8ddfdf549-2zbg6_858ccc6d-475e-4636-b597-c155973b2e85/webhook-server/0.log" Dec 05 20:13:57 crc kubenswrapper[4982]: I1205 20:13:57.079923 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-68bx7_9d70d003-cbaf-4f61-b929-2803e9789657/kube-rbac-proxy/0.log" Dec 05 20:13:57 crc kubenswrapper[4982]: I1205 20:13:57.678616 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-68bx7_9d70d003-cbaf-4f61-b929-2803e9789657/speaker/0.log" Dec 05 20:13:57 crc kubenswrapper[4982]: I1205 20:13:57.749096 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/frr/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.057563 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.202794 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.206667 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.257565 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.449330 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/extract/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.452935 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.455787 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.607859 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.759575 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.783073 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.793668 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.950861 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.951764 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:14:10 crc kubenswrapper[4982]: I1205 20:14:10.952626 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/extract/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.118079 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.331475 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.334032 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.350422 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.495919 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.498274 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/extract/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.511617 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.678431 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.787024 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.827046 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.835962 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:14:11 crc kubenswrapper[4982]: I1205 20:14:11.985051 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.003671 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.026991 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/extract/0.log" Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.171439 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log" Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.370134 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log" Dec 05 20:14:12 crc 
kubenswrapper[4982]: I1205 20:14:12.372292 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log"
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.399653 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log"
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.542714 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log"
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.557475 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.557557 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.563663 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log"
Dec 05 20:14:12 crc kubenswrapper[4982]: I1205 20:14:12.800856 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.013908 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.062834 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.138073 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.139270 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/registry-server/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.258753 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.270138 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.491947 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-n4462_66d787d5-2cd9-4a22-8549-acd33135e4f9/marketplace-operator/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.675879 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log"
Dec 05 20:14:13 crc kubenswrapper[4982]: I1205 20:14:13.964537 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/registry-server/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.008351 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.064806 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.077190 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.406911 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.417868 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.566614 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.599279 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/registry-server/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.744403 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.791766 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.836689 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.995496 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log"
Dec 05 20:14:14 crc kubenswrapper[4982]: I1205 20:14:14.996958 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log"
Dec 05 20:14:15 crc kubenswrapper[4982]: I1205 20:14:15.353589 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/registry-server/0.log"
Dec 05 20:14:28 crc kubenswrapper[4982]: I1205 20:14:28.806987 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-5mhck_e2c8beeb-010c-4aac-b407-981a15acaee9/prometheus-operator/0.log"
Dec 05 20:14:29 crc kubenswrapper[4982]: I1205 20:14:29.000787 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_1624c766-4ba0-48bf-a7b7-4a8322251e2e/prometheus-operator-admission-webhook/0.log"
Dec 05 20:14:29 crc kubenswrapper[4982]: I1205 20:14:29.065032 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_fe591891-51d5-49c8-880a-213703150e27/prometheus-operator-admission-webhook/0.log"
Dec 05 20:14:29 crc kubenswrapper[4982]: I1205 20:14:29.228235 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-nwfp4_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b/operator/0.log"
Dec 05 20:14:29 crc kubenswrapper[4982]: I1205 20:14:29.291790 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-lz5fb_15088f20-542f-426d-9e0f-cfb52b660483/perses-operator/0.log"
Dec 05 20:14:42 crc kubenswrapper[4982]: I1205 20:14:42.558420 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:14:42 crc kubenswrapper[4982]: I1205 20:14:42.558964 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:14:43 crc kubenswrapper[4982]: I1205 20:14:43.459518 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/kube-rbac-proxy/0.log"
Dec 05 20:14:43 crc kubenswrapper[4982]: I1205 20:14:43.471951 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/manager/0.log"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.195709 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"]
Dec 05 20:15:00 crc kubenswrapper[4982]: E1205 20:15:00.196692 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1542f7d8-66d3-45e5-b42f-54438b594db1" containerName="container-00"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.196705 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="1542f7d8-66d3-45e5-b42f-54438b594db1" containerName="container-00"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.196910 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="1542f7d8-66d3-45e5-b42f-54438b594db1" containerName="container-00"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.197725 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.200318 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.200539 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.213306 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"]
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.292868 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlpdf\" (UniqueName: \"kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.293008 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.293058 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.394551 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.394630 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.394812 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlpdf\" (UniqueName: \"kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.397470 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.404526 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.415798 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlpdf\" (UniqueName: \"kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf\") pod \"collect-profiles-29416095-lpdc8\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:00 crc kubenswrapper[4982]: I1205 20:15:00.516313 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:01 crc kubenswrapper[4982]: I1205 20:15:01.080241 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"]
Dec 05 20:15:01 crc kubenswrapper[4982]: I1205 20:15:01.217781 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8" event={"ID":"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f","Type":"ContainerStarted","Data":"c94f4336af00bcd3b648e59f107990f1da63ec4bef33b18a9e5025b62d236fa7"}
Dec 05 20:15:02 crc kubenswrapper[4982]: I1205 20:15:02.228839 4982 generic.go:334] "Generic (PLEG): container finished" podID="d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" containerID="9e7b55bf3a4f5866989b898416ac6473ffe72ec27afff4d51bd30a59028785d7" exitCode=0
Dec 05 20:15:02 crc kubenswrapper[4982]: I1205 20:15:02.228946 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8" event={"ID":"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f","Type":"ContainerDied","Data":"9e7b55bf3a4f5866989b898416ac6473ffe72ec27afff4d51bd30a59028785d7"}
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.770512 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.874333 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume\") pod \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") "
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.874408 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume\") pod \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") "
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.874472 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlpdf\" (UniqueName: \"kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf\") pod \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\" (UID: \"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f\") "
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.875283 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume" (OuterVolumeSpecName: "config-volume") pod "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" (UID: "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.894745 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf" (OuterVolumeSpecName: "kube-api-access-nlpdf") pod "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" (UID: "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f"). InnerVolumeSpecName "kube-api-access-nlpdf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.898352 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" (UID: "d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.976547 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.976581 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:03.976595 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlpdf\" (UniqueName: \"kubernetes.io/projected/d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f-kube-api-access-nlpdf\") on node \"crc\" DevicePath \"\""
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:04.253217 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8" event={"ID":"d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f","Type":"ContainerDied","Data":"c94f4336af00bcd3b648e59f107990f1da63ec4bef33b18a9e5025b62d236fa7"}
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:04.253251 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c94f4336af00bcd3b648e59f107990f1da63ec4bef33b18a9e5025b62d236fa7"
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:04.253281 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416095-lpdc8"
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:04.851457 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28"]
Dec 05 20:15:04 crc kubenswrapper[4982]: I1205 20:15:04.861028 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416050-m4f28"]
Dec 05 20:15:05 crc kubenswrapper[4982]: I1205 20:15:05.405908 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33b1f837-d120-4042-9b3d-5b31342d5ebc" path="/var/lib/kubelet/pods/33b1f837-d120-4042-9b3d-5b31342d5ebc/volumes"
Dec 05 20:15:12 crc kubenswrapper[4982]: I1205 20:15:12.557532 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:15:12 crc kubenswrapper[4982]: I1205 20:15:12.558101 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:15:12 crc kubenswrapper[4982]: I1205 20:15:12.558169 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 20:15:12 crc kubenswrapper[4982]: I1205 20:15:12.558974 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 20:15:12 crc kubenswrapper[4982]: I1205 20:15:12.559033 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" gracePeriod=600
Dec 05 20:15:12 crc kubenswrapper[4982]: E1205 20:15:12.733625 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:15:13 crc kubenswrapper[4982]: I1205 20:15:13.340839 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" exitCode=0
Dec 05 20:15:13 crc kubenswrapper[4982]: I1205 20:15:13.340979 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"}
Dec 05 20:15:13 crc kubenswrapper[4982]: I1205 20:15:13.341183 4982 scope.go:117] "RemoveContainer" containerID="56457d337403394131ed1ce3ee5674c0b587eb58162b40191049d3e206cb4edf"
Dec 05 20:15:13 crc kubenswrapper[4982]: I1205 20:15:13.342132 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:15:13 crc kubenswrapper[4982]: E1205 20:15:13.342407 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:15:26 crc kubenswrapper[4982]: I1205 20:15:26.390138 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:15:26 crc kubenswrapper[4982]: E1205 20:15:26.391251 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:15:39 crc kubenswrapper[4982]: I1205 20:15:39.394878 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:15:39 crc kubenswrapper[4982]: E1205 20:15:39.397681 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:15:51 crc kubenswrapper[4982]: I1205 20:15:51.866373 4982 scope.go:117] "RemoveContainer" containerID="ae422ad15b136efb8454598c3111d2668dff23e783585be1e0158da0ac726975"
Dec 05 20:15:52 crc kubenswrapper[4982]: I1205 20:15:52.391179 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:15:52 crc kubenswrapper[4982]: E1205 20:15:52.391673 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:16:07 crc kubenswrapper[4982]: I1205 20:16:07.390519 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:16:07 crc kubenswrapper[4982]: E1205 20:16:07.391262 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:16:20 crc kubenswrapper[4982]: I1205 20:16:20.390875 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c"
Dec 05 20:16:20 crc kubenswrapper[4982]: E1205 20:16:20.392402 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:16:27 crc kubenswrapper[4982]: I1205 20:16:27.161345 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerID="7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3" exitCode=0
Dec 05 20:16:27 crc kubenswrapper[4982]: I1205 20:16:27.161527 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-9pdw7/must-gather-r498v" event={"ID":"2a3151fb-529a-4450-9fbf-73b1afccb744","Type":"ContainerDied","Data":"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3"}
Dec 05 20:16:27 crc kubenswrapper[4982]: I1205 20:16:27.163535 4982 scope.go:117] "RemoveContainer" containerID="7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3"
Dec 05 20:16:27 crc kubenswrapper[4982]: I1205 20:16:27.782348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9pdw7_must-gather-r498v_2a3151fb-529a-4450-9fbf-73b1afccb744/gather/0.log"
Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.391418 4982 scope.go:117]
"RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:16:35 crc kubenswrapper[4982]: E1205 20:16:35.393100 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.423678 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-9pdw7/must-gather-r498v"] Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.424048 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-9pdw7/must-gather-r498v" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="copy" containerID="cri-o://7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01" gracePeriod=2 Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.438766 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-9pdw7/must-gather-r498v"] Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.952537 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9pdw7_must-gather-r498v_2a3151fb-529a-4450-9fbf-73b1afccb744/copy/0.log" Dec 05 20:16:35 crc kubenswrapper[4982]: I1205 20:16:35.955502 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.075424 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sdps\" (UniqueName: \"kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps\") pod \"2a3151fb-529a-4450-9fbf-73b1afccb744\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.075495 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output\") pod \"2a3151fb-529a-4450-9fbf-73b1afccb744\" (UID: \"2a3151fb-529a-4450-9fbf-73b1afccb744\") " Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.105404 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps" (OuterVolumeSpecName: "kube-api-access-4sdps") pod "2a3151fb-529a-4450-9fbf-73b1afccb744" (UID: "2a3151fb-529a-4450-9fbf-73b1afccb744"). InnerVolumeSpecName "kube-api-access-4sdps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.177948 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sdps\" (UniqueName: \"kubernetes.io/projected/2a3151fb-529a-4450-9fbf-73b1afccb744-kube-api-access-4sdps\") on node \"crc\" DevicePath \"\"" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.249174 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "2a3151fb-529a-4450-9fbf-73b1afccb744" (UID: "2a3151fb-529a-4450-9fbf-73b1afccb744"). 
InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.262672 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-9pdw7_must-gather-r498v_2a3151fb-529a-4450-9fbf-73b1afccb744/copy/0.log" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.263123 4982 generic.go:334] "Generic (PLEG): container finished" podID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerID="7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01" exitCode=143 Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.263208 4982 scope.go:117] "RemoveContainer" containerID="7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.263204 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-9pdw7/must-gather-r498v" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.280645 4982 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/2a3151fb-529a-4450-9fbf-73b1afccb744-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.284947 4982 scope.go:117] "RemoveContainer" containerID="7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.361891 4982 scope.go:117] "RemoveContainer" containerID="7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01" Dec 05 20:16:36 crc kubenswrapper[4982]: E1205 20:16:36.362377 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01\": container with ID starting with 7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01 not found: ID does not exist" containerID="7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.362417 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01"} err="failed to get container status \"7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01\": rpc error: code = NotFound desc = could not find container \"7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01\": container with ID starting with 7bab05588e81b3c6205d0db06aafb687ed71a77c8ef4e42eeda4d301c84f2a01 not found: ID does not exist" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.362443 4982 scope.go:117] "RemoveContainer" containerID="7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3" Dec 05 20:16:36 crc kubenswrapper[4982]: E1205 20:16:36.362887 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3\": container with ID starting with 7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3 not found: ID does not exist" containerID="7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3" Dec 05 20:16:36 crc kubenswrapper[4982]: I1205 20:16:36.362918 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3"} err="failed to get container 
status \"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3\": rpc error: code = NotFound desc = could not find container \"7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3\": container with ID starting with 7c57969d3c45b9d3b9d37d075ca17c74457cc0ed91611ad309417c02962824a3 not found: ID does not exist" Dec 05 20:16:37 crc kubenswrapper[4982]: I1205 20:16:37.404344 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" path="/var/lib/kubelet/pods/2a3151fb-529a-4450-9fbf-73b1afccb744/volumes" Dec 05 20:16:46 crc kubenswrapper[4982]: I1205 20:16:46.390556 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:16:46 crc kubenswrapper[4982]: E1205 20:16:46.391459 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.819725 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:16:54 crc kubenswrapper[4982]: E1205 20:16:54.820890 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" containerName="collect-profiles" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.820903 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" containerName="collect-profiles" Dec 05 20:16:54 crc kubenswrapper[4982]: E1205 20:16:54.820935 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="copy" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.820941 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="copy" Dec 05 20:16:54 crc kubenswrapper[4982]: E1205 20:16:54.820957 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="gather" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.820964 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="gather" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.821161 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="gather" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.821176 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a3151fb-529a-4450-9fbf-73b1afccb744" containerName="copy" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.821183 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d05a3f07-fc9e-4da6-b38c-a0eeb3fe928f" containerName="collect-profiles" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.822814 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.839143 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.897257 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.897417 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.897533 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p46qc\" (UniqueName: \"kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.999401 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p46qc\" (UniqueName: \"kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.999493 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:54 crc kubenswrapper[4982]: I1205 20:16:54.999576 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:55 crc kubenswrapper[4982]: I1205 20:16:54.999986 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:55 crc kubenswrapper[4982]: I1205 20:16:55.000033 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:55 crc kubenswrapper[4982]: I1205 20:16:55.026051 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-p46qc\" (UniqueName: \"kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc\") pod \"certified-operators-zbbn5\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:55 crc kubenswrapper[4982]: I1205 20:16:55.149601 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:16:55 crc kubenswrapper[4982]: I1205 20:16:55.742726 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:16:56 crc kubenswrapper[4982]: I1205 20:16:56.475139 4982 generic.go:334] "Generic (PLEG): container finished" podID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerID="fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31" exitCode=0 Dec 05 20:16:56 crc kubenswrapper[4982]: I1205 20:16:56.475245 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerDied","Data":"fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31"} Dec 05 20:16:56 crc kubenswrapper[4982]: I1205 20:16:56.475483 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerStarted","Data":"2db59624a629a22dfff791948ae512728761ef6d88584837f9e8c83a874cddd0"} Dec 05 20:16:56 crc kubenswrapper[4982]: I1205 20:16:56.476938 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 20:16:58 crc kubenswrapper[4982]: I1205 20:16:58.390978 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:16:58 crc kubenswrapper[4982]: E1205 20:16:58.391928 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:16:58 crc kubenswrapper[4982]: I1205 20:16:58.496833 4982 generic.go:334] "Generic (PLEG): container finished" podID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerID="5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c" exitCode=0 Dec 05 20:16:58 crc kubenswrapper[4982]: I1205 20:16:58.496906 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerDied","Data":"5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c"} Dec 05 20:16:59 crc kubenswrapper[4982]: I1205 20:16:59.511647 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerStarted","Data":"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac"} Dec 05 20:16:59 crc kubenswrapper[4982]: I1205 20:16:59.540714 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zbbn5" podStartSLOduration=3.047972651 
podStartE2EDuration="5.540688544s" podCreationTimestamp="2025-12-05 20:16:54 +0000 UTC" firstStartedPulling="2025-12-05 20:16:56.476679549 +0000 UTC m=+3795.358565554" lastFinishedPulling="2025-12-05 20:16:58.969395452 +0000 UTC m=+3797.851281447" observedRunningTime="2025-12-05 20:16:59.532486432 +0000 UTC m=+3798.414372447" watchObservedRunningTime="2025-12-05 20:16:59.540688544 +0000 UTC m=+3798.422574539" Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.820008 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.822940 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.859405 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.918614 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xffv\" (UniqueName: \"kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.918841 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:02 crc kubenswrapper[4982]: I1205 20:17:02.918892 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.021209 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.021513 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.021733 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xffv\" (UniqueName: \"kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.021869 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.022189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.049253 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xffv\" (UniqueName: \"kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv\") pod \"community-operators-8hpsg\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.153096 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:03 crc kubenswrapper[4982]: I1205 20:17:03.703080 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:03 crc kubenswrapper[4982]: W1205 20:17:03.705490 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46cdc2c4_7bda_4a7d_9ae6_f5ea32b536d5.slice/crio-f63cd87b6ce33aace070c0a055c64f64731af7af7d5ff204141e814636108fa9 WatchSource:0}: Error finding container f63cd87b6ce33aace070c0a055c64f64731af7af7d5ff204141e814636108fa9: Status 404 returned error can't find the container with id f63cd87b6ce33aace070c0a055c64f64731af7af7d5ff204141e814636108fa9 Dec 05 20:17:04 crc kubenswrapper[4982]: I1205 20:17:04.558072 4982 generic.go:334] "Generic (PLEG): container finished" podID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerID="00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1" exitCode=0 Dec 05 20:17:04 crc kubenswrapper[4982]: I1205 20:17:04.558176 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerDied","Data":"00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1"} Dec 05 20:17:04 crc kubenswrapper[4982]: I1205 20:17:04.558399 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerStarted","Data":"f63cd87b6ce33aace070c0a055c64f64731af7af7d5ff204141e814636108fa9"} Dec 05 20:17:05 crc kubenswrapper[4982]: I1205 20:17:05.149778 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:05 crc kubenswrapper[4982]: I1205 20:17:05.150126 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:05 crc kubenswrapper[4982]: I1205 20:17:05.204337 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:05 crc kubenswrapper[4982]: I1205 20:17:05.576424 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerStarted","Data":"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b"} Dec 05 20:17:05 crc kubenswrapper[4982]: I1205 20:17:05.657017 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:06 crc kubenswrapper[4982]: I1205 20:17:06.583897 4982 generic.go:334] "Generic (PLEG): container finished" podID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerID="d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b" exitCode=0 Dec 05 20:17:06 crc kubenswrapper[4982]: I1205 20:17:06.583970 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerDied","Data":"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b"} Dec 05 20:17:07 crc kubenswrapper[4982]: I1205 20:17:07.595621 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:17:07 crc kubenswrapper[4982]: I1205 20:17:07.596315 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerStarted","Data":"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb"} Dec 05 20:17:07 crc kubenswrapper[4982]: I1205 20:17:07.596891 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zbbn5" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="registry-server" containerID="cri-o://f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac" gracePeriod=2 Dec 05 20:17:07 crc kubenswrapper[4982]: I1205 20:17:07.627659 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8hpsg" podStartSLOduration=3.195237391 podStartE2EDuration="5.627642313s" podCreationTimestamp="2025-12-05 20:17:02 +0000 UTC" firstStartedPulling="2025-12-05 20:17:04.559931429 +0000 UTC m=+3803.441817424" lastFinishedPulling="2025-12-05 20:17:06.992336351 +0000 UTC m=+3805.874222346" observedRunningTime="2025-12-05 20:17:07.618833647 +0000 UTC m=+3806.500719642" watchObservedRunningTime="2025-12-05 20:17:07.627642313 +0000 UTC m=+3806.509528308" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.240417 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.330378 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities\") pod \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.331426 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities" (OuterVolumeSpecName: "utilities") pod "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" (UID: "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.432409 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p46qc\" (UniqueName: \"kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc\") pod \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.432562 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content\") pod \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\" (UID: \"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7\") " Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.433177 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.442347 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc" (OuterVolumeSpecName: "kube-api-access-p46qc") pod "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" (UID: "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7"). InnerVolumeSpecName "kube-api-access-p46qc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.502355 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" (UID: "7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.534586 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p46qc\" (UniqueName: \"kubernetes.io/projected/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-kube-api-access-p46qc\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.534618 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.640660 4982 generic.go:334] "Generic (PLEG): container finished" podID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerID="f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac" exitCode=0 Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.641635 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zbbn5" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.642363 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerDied","Data":"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac"} Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.642420 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zbbn5" event={"ID":"7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7","Type":"ContainerDied","Data":"2db59624a629a22dfff791948ae512728761ef6d88584837f9e8c83a874cddd0"} Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.642438 4982 scope.go:117] "RemoveContainer" containerID="f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.671595 4982 scope.go:117] "RemoveContainer" containerID="5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.677103 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.684644 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zbbn5"] Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.696475 4982 scope.go:117] "RemoveContainer" containerID="fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.758360 4982 scope.go:117] "RemoveContainer" containerID="f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac" Dec 05 20:17:08 crc kubenswrapper[4982]: E1205 20:17:08.760474 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac\": container with ID starting with f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac not found: ID does not exist" containerID="f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.760520 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac"} err="failed to get container status \"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac\": rpc error: code = NotFound desc = could not find container \"f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac\": container with ID starting with f522eaca5d8b9d75466ad9ab6aba66b55c21caa10bd1684ad61b8a84a6349dac not found: ID does not exist" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.760550 4982 scope.go:117] "RemoveContainer" containerID="5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c" Dec 05 20:17:08 crc kubenswrapper[4982]: E1205 20:17:08.761017 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c\": container with ID starting with 5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c not found: ID does not exist" containerID="5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.761063 4982 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c"} err="failed to get container status \"5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c\": rpc error: code = NotFound desc = could not find container \"5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c\": container with ID starting with 5e65eb592faebe27045e0ed6b41dfa54890da9c583c5a803f822af21a6047f5c not found: ID does not exist" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.761088 4982 scope.go:117] "RemoveContainer" containerID="fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31" Dec 05 20:17:08 crc kubenswrapper[4982]: E1205 20:17:08.762069 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31\": container with ID starting with fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31 not found: ID does not exist" containerID="fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31" Dec 05 20:17:08 crc kubenswrapper[4982]: I1205 20:17:08.762094 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31"} err="failed to get container status \"fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31\": rpc error: code = NotFound desc = could not find container \"fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31\": container with ID starting with fc987057fd2e4693e8effc94ffa58c432a0599fea14067456984484483c4fd31 not found: ID does not exist" Dec 05 20:17:09 crc kubenswrapper[4982]: I1205 20:17:09.405326 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" path="/var/lib/kubelet/pods/7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7/volumes" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.154487 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.155086 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.214241 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.390105 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:17:13 crc kubenswrapper[4982]: E1205 20:17:13.390418 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.756078 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:13 crc kubenswrapper[4982]: I1205 20:17:13.810826 4982 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:15 crc kubenswrapper[4982]: I1205 20:17:15.705797 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8hpsg" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="registry-server" containerID="cri-o://0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb" gracePeriod=2 Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.436461 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.608667 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities\") pod \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.609062 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xffv\" (UniqueName: \"kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv\") pod \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.609198 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content\") pod \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\" (UID: \"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5\") " Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.609991 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities" (OuterVolumeSpecName: "utilities") pod "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" (UID: "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.615598 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv" (OuterVolumeSpecName: "kube-api-access-9xffv") pod "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" (UID: "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5"). InnerVolumeSpecName "kube-api-access-9xffv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.661615 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" (UID: "46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.711102 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.712239 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xffv\" (UniqueName: \"kubernetes.io/projected/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-kube-api-access-9xffv\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.712318 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.716506 4982 generic.go:334] "Generic (PLEG): container finished" podID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerID="0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb" exitCode=0 Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.716553 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerDied","Data":"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb"} Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.716729 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8hpsg" event={"ID":"46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5","Type":"ContainerDied","Data":"f63cd87b6ce33aace070c0a055c64f64731af7af7d5ff204141e814636108fa9"} Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.716590 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8hpsg" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.716815 4982 scope.go:117] "RemoveContainer" containerID="0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.747382 4982 scope.go:117] "RemoveContainer" containerID="d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.777221 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.790358 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-8hpsg"] Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.796435 4982 scope.go:117] "RemoveContainer" containerID="00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.832329 4982 scope.go:117] "RemoveContainer" containerID="0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb" Dec 05 20:17:16 crc kubenswrapper[4982]: E1205 20:17:16.832910 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb\": container with ID starting with 0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb not found: ID does not exist" containerID="0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.832943 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb"} err="failed to get container status \"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb\": rpc error: code = NotFound desc = could not find container \"0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb\": container with ID starting with 0cac745bcb16a472e2a446c73b883079b179cd47e8ce948866ee1b69d9f9aeeb not found: ID does not exist" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.832964 4982 scope.go:117] "RemoveContainer" containerID="d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b" Dec 05 20:17:16 crc kubenswrapper[4982]: E1205 20:17:16.833314 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b\": container with ID starting with d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b not found: ID does not exist" containerID="d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.833342 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b"} err="failed to get container status \"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b\": rpc error: code = NotFound desc = could not find container \"d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b\": container with ID starting with d23e2442d1fe711765316ed2c53069dca0b4174ad26f1a06b5c2cc3538843c0b not found: ID does not exist" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.833360 4982 scope.go:117] "RemoveContainer" 
containerID="00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1" Dec 05 20:17:16 crc kubenswrapper[4982]: E1205 20:17:16.833757 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1\": container with ID starting with 00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1 not found: ID does not exist" containerID="00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1" Dec 05 20:17:16 crc kubenswrapper[4982]: I1205 20:17:16.833789 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1"} err="failed to get container status \"00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1\": rpc error: code = NotFound desc = could not find container \"00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1\": container with ID starting with 00795a2f4d61fd9f2640b01f22ddf9594ca06526b472b754ddb77b27edc207c1 not found: ID does not exist" Dec 05 20:17:17 crc kubenswrapper[4982]: I1205 20:17:17.401745 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" path="/var/lib/kubelet/pods/46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5/volumes" Dec 05 20:17:25 crc kubenswrapper[4982]: I1205 20:17:25.560787 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:17:25 crc kubenswrapper[4982]: E1205 20:17:25.561712 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.987112 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.987887 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="extract-content" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.987903 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="extract-content" Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.987924 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.987931 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.987946 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="extract-utilities" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.987952 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="extract-utilities" Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.987974 4982 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="extract-utilities" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.987982 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="extract-utilities" Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.987997 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="extract-content" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.988005 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="extract-content" Dec 05 20:17:26 crc kubenswrapper[4982]: E1205 20:17:26.988018 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.988027 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.988219 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="46cdc2c4-7bda-4a7d-9ae6-f5ea32b536d5" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.988249 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c23bd73-5d8c-40cf-ad5b-873e4cdb60d7" containerName="registry-server" Dec 05 20:17:26 crc kubenswrapper[4982]: I1205 20:17:26.989802 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.048500 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.088535 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.088618 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.088703 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p54km\" (UniqueName: \"kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.190146 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p54km\" (UniqueName: \"kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.190347 4982 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.190396 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.190849 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.190917 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.211380 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p54km\" (UniqueName: \"kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km\") pod \"redhat-marketplace-kg6tg\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.347418 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.828137 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:27 crc kubenswrapper[4982]: I1205 20:17:27.839603 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerStarted","Data":"08a669f2b7a8c6e661837d2fed3933afdab57a4772eac2e41946b3de8c11c73e"} Dec 05 20:17:28 crc kubenswrapper[4982]: I1205 20:17:28.867745 4982 generic.go:334] "Generic (PLEG): container finished" podID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerID="7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001" exitCode=0 Dec 05 20:17:28 crc kubenswrapper[4982]: I1205 20:17:28.867832 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerDied","Data":"7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001"} Dec 05 20:17:29 crc kubenswrapper[4982]: I1205 20:17:29.880466 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerStarted","Data":"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50"} Dec 05 20:17:30 crc kubenswrapper[4982]: I1205 20:17:30.891384 4982 generic.go:334] "Generic (PLEG): container finished" podID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerID="dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50" exitCode=0 Dec 05 20:17:30 crc kubenswrapper[4982]: I1205 20:17:30.891431 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerDied","Data":"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50"} Dec 05 20:17:31 crc kubenswrapper[4982]: I1205 20:17:31.902867 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerStarted","Data":"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1"} Dec 05 20:17:31 crc kubenswrapper[4982]: I1205 20:17:31.933994 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kg6tg" podStartSLOduration=3.552350837 podStartE2EDuration="5.933972961s" podCreationTimestamp="2025-12-05 20:17:26 +0000 UTC" firstStartedPulling="2025-12-05 20:17:28.872300004 +0000 UTC m=+3827.754186019" lastFinishedPulling="2025-12-05 20:17:31.253922148 +0000 UTC m=+3830.135808143" observedRunningTime="2025-12-05 20:17:31.925861262 +0000 UTC m=+3830.807747277" watchObservedRunningTime="2025-12-05 20:17:31.933972961 +0000 UTC m=+3830.815858956" Dec 05 20:17:37 crc kubenswrapper[4982]: I1205 20:17:37.347738 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:37 crc kubenswrapper[4982]: I1205 20:17:37.348304 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:37 crc kubenswrapper[4982]: I1205 20:17:37.409407 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:38 crc kubenswrapper[4982]: I1205 20:17:38.020693 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:38 crc kubenswrapper[4982]: I1205 20:17:38.084547 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:39 crc kubenswrapper[4982]: I1205 20:17:39.390749 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:17:39 crc kubenswrapper[4982]: E1205 20:17:39.391990 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:17:39 crc kubenswrapper[4982]: I1205 20:17:39.985929 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kg6tg" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="registry-server" containerID="cri-o://891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1" gracePeriod=2 Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.539425 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.665894 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p54km\" (UniqueName: \"kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km\") pod \"041ff87b-490c-4a46-9803-c12b6887b9ee\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.666225 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content\") pod \"041ff87b-490c-4a46-9803-c12b6887b9ee\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.666287 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities\") pod \"041ff87b-490c-4a46-9803-c12b6887b9ee\" (UID: \"041ff87b-490c-4a46-9803-c12b6887b9ee\") " Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.667143 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities" (OuterVolumeSpecName: "utilities") pod "041ff87b-490c-4a46-9803-c12b6887b9ee" (UID: "041ff87b-490c-4a46-9803-c12b6887b9ee"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.673322 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km" (OuterVolumeSpecName: "kube-api-access-p54km") pod "041ff87b-490c-4a46-9803-c12b6887b9ee" (UID: "041ff87b-490c-4a46-9803-c12b6887b9ee"). 
InnerVolumeSpecName "kube-api-access-p54km". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.691312 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "041ff87b-490c-4a46-9803-c12b6887b9ee" (UID: "041ff87b-490c-4a46-9803-c12b6887b9ee"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.769194 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.769233 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/041ff87b-490c-4a46-9803-c12b6887b9ee-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.769242 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p54km\" (UniqueName: \"kubernetes.io/projected/041ff87b-490c-4a46-9803-c12b6887b9ee-kube-api-access-p54km\") on node \"crc\" DevicePath \"\"" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.998214 4982 generic.go:334] "Generic (PLEG): container finished" podID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerID="891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1" exitCode=0 Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.998262 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerDied","Data":"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1"} Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.998293 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6tg" event={"ID":"041ff87b-490c-4a46-9803-c12b6887b9ee","Type":"ContainerDied","Data":"08a669f2b7a8c6e661837d2fed3933afdab57a4772eac2e41946b3de8c11c73e"} Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.998313 4982 scope.go:117] "RemoveContainer" containerID="891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1" Dec 05 20:17:40 crc kubenswrapper[4982]: I1205 20:17:40.998462 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6tg" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.025009 4982 scope.go:117] "RemoveContainer" containerID="dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.055349 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.064024 4982 scope.go:117] "RemoveContainer" containerID="7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.064908 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6tg"] Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.098386 4982 scope.go:117] "RemoveContainer" containerID="891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1" Dec 05 20:17:41 crc kubenswrapper[4982]: E1205 20:17:41.098855 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1\": container with ID starting with 891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1 not found: ID does not exist" containerID="891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.098902 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1"} err="failed to get container status \"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1\": rpc error: code = NotFound desc = could not find container \"891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1\": container with ID starting with 891f73a9c1e35789107729e16d8a419eac6d523fdb3b76741d82c23575a0fdb1 not found: ID does not exist" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.098929 4982 scope.go:117] "RemoveContainer" containerID="dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50" Dec 05 20:17:41 crc kubenswrapper[4982]: E1205 20:17:41.099287 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50\": container with ID starting with dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50 not found: ID does not exist" containerID="dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.099309 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50"} err="failed to get container status \"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50\": rpc error: code = NotFound desc = could not find container \"dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50\": container with ID starting with dba543b534558a940a826ef707f589e343e902425752078d0fc9a06b95983d50 not found: ID does not exist" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.099323 4982 scope.go:117] "RemoveContainer" containerID="7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001" Dec 05 20:17:41 crc kubenswrapper[4982]: E1205 20:17:41.099541 4982 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001\": container with ID starting with 7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001 not found: ID does not exist" containerID="7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.099565 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001"} err="failed to get container status \"7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001\": rpc error: code = NotFound desc = could not find container \"7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001\": container with ID starting with 7aaa49b7d47fafa01297b8be126954a845f0bf2f977518d99e7b1ab56caa8001 not found: ID does not exist" Dec 05 20:17:41 crc kubenswrapper[4982]: I1205 20:17:41.404281 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" path="/var/lib/kubelet/pods/041ff87b-490c-4a46-9803-c12b6887b9ee/volumes" Dec 05 20:17:54 crc kubenswrapper[4982]: I1205 20:17:54.391271 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:17:54 crc kubenswrapper[4982]: E1205 20:17:54.392095 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:18:09 crc kubenswrapper[4982]: I1205 20:18:09.390316 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:18:09 crc kubenswrapper[4982]: E1205 20:18:09.393290 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:18:20 crc kubenswrapper[4982]: I1205 20:18:20.390944 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:18:20 crc kubenswrapper[4982]: E1205 20:18:20.391917 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:18:32 crc kubenswrapper[4982]: I1205 20:18:32.391300 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:18:32 crc kubenswrapper[4982]: E1205 20:18:32.392027 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:18:43 crc kubenswrapper[4982]: I1205 20:18:43.391257 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:18:43 crc kubenswrapper[4982]: E1205 20:18:43.392213 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:18:56 crc kubenswrapper[4982]: I1205 20:18:56.390437 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:18:56 crc kubenswrapper[4982]: E1205 20:18:56.391294 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:19:10 crc kubenswrapper[4982]: I1205 20:19:10.390406 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:19:10 crc kubenswrapper[4982]: E1205 20:19:10.391468 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:19:25 crc kubenswrapper[4982]: I1205 20:19:25.393114 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:19:25 crc kubenswrapper[4982]: E1205 20:19:25.394029 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:19:40 crc kubenswrapper[4982]: I1205 20:19:40.392090 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:19:40 crc kubenswrapper[4982]: E1205 20:19:40.393143 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.391276 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:19:53 crc kubenswrapper[4982]: E1205 20:19:53.392035 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.419642 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wkr2f/must-gather-fm4bf"] Dec 05 20:19:53 crc kubenswrapper[4982]: E1205 20:19:53.420246 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="extract-content" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.420261 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="extract-content" Dec 05 20:19:53 crc kubenswrapper[4982]: E1205 20:19:53.420290 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="registry-server" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.420297 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="registry-server" Dec 05 20:19:53 crc kubenswrapper[4982]: E1205 20:19:53.420318 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="extract-utilities" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.420324 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="extract-utilities" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.420519 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="041ff87b-490c-4a46-9803-c12b6887b9ee" containerName="registry-server" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.421618 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.425181 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wkr2f"/"openshift-service-ca.crt" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.425264 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wkr2f"/"default-dockercfg-8p2s5" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.425823 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wkr2f"/"kube-root-ca.crt" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.443704 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wkr2f/must-gather-fm4bf"] Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.543725 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.543833 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rq97\" (UniqueName: \"kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.646296 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.646408 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rq97\" (UniqueName: \"kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.646853 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.673550 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rq97\" (UniqueName: \"kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97\") pod \"must-gather-fm4bf\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") " pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:53 crc kubenswrapper[4982]: I1205 20:19:53.747780 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" Dec 05 20:19:54 crc kubenswrapper[4982]: I1205 20:19:54.301909 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wkr2f/must-gather-fm4bf"] Dec 05 20:19:54 crc kubenswrapper[4982]: I1205 20:19:54.386206 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" event={"ID":"9684edc7-e2f1-4e85-ab1b-5d5181219875","Type":"ContainerStarted","Data":"d283f080ac0170f130f7164ae90de2e54fb135e6de73bb4c1ec9cd4767567b61"} Dec 05 20:19:55 crc kubenswrapper[4982]: I1205 20:19:55.402242 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" event={"ID":"9684edc7-e2f1-4e85-ab1b-5d5181219875","Type":"ContainerStarted","Data":"0adbb8c1020477b86faa2431d44064430beea8a51fcf4fc2a8f08e27285a4b5d"} Dec 05 20:19:55 crc kubenswrapper[4982]: I1205 20:19:55.402662 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" event={"ID":"9684edc7-e2f1-4e85-ab1b-5d5181219875","Type":"ContainerStarted","Data":"9c74d3039f9648d9dc56c379229186ee357a70088cc6619f1e317996f1b653b4"} Dec 05 20:19:55 crc kubenswrapper[4982]: I1205 20:19:55.431712 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" podStartSLOduration=2.431684713 podStartE2EDuration="2.431684713s" podCreationTimestamp="2025-12-05 20:19:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:19:55.42014149 +0000 UTC m=+3974.302027495" watchObservedRunningTime="2025-12-05 20:19:55.431684713 +0000 UTC m=+3974.313570708" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.169303 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-64wfv"] Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.173329 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.268495 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf8gc\" (UniqueName: \"kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.268560 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.371073 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf8gc\" (UniqueName: \"kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.371141 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.371444 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.396751 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf8gc\" (UniqueName: \"kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc\") pod \"crc-debug-64wfv\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:19:59 crc kubenswrapper[4982]: I1205 20:19:59.504677 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:20:00 crc kubenswrapper[4982]: I1205 20:20:00.451937 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" event={"ID":"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2","Type":"ContainerStarted","Data":"ab5f4e6fa0f65c2aa9d0a4813c83c8e9da78ddcd318a95ab1a929644d769e26b"} Dec 05 20:20:00 crc kubenswrapper[4982]: I1205 20:20:00.452420 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" event={"ID":"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2","Type":"ContainerStarted","Data":"9320c1f91f88f61a646ddadc42e25e8742b03b6e69254dcb51bcdcbd0cb2e6e3"} Dec 05 20:20:00 crc kubenswrapper[4982]: I1205 20:20:00.475262 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" podStartSLOduration=1.475243657 podStartE2EDuration="1.475243657s" podCreationTimestamp="2025-12-05 20:19:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:20:00.46354879 +0000 UTC m=+3979.345434785" watchObservedRunningTime="2025-12-05 20:20:00.475243657 +0000 UTC m=+3979.357129652" Dec 05 20:20:05 crc kubenswrapper[4982]: I1205 20:20:05.390981 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:20:05 crc kubenswrapper[4982]: E1205 20:20:05.391688 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:20:20 crc kubenswrapper[4982]: I1205 20:20:20.390758 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:20:21 crc kubenswrapper[4982]: I1205 20:20:21.653221 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d"} Dec 05 20:20:36 crc kubenswrapper[4982]: I1205 20:20:36.780829 4982 generic.go:334] "Generic (PLEG): container finished" podID="3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" containerID="ab5f4e6fa0f65c2aa9d0a4813c83c8e9da78ddcd318a95ab1a929644d769e26b" exitCode=0 Dec 05 20:20:36 crc kubenswrapper[4982]: I1205 20:20:36.780913 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" event={"ID":"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2","Type":"ContainerDied","Data":"ab5f4e6fa0f65c2aa9d0a4813c83c8e9da78ddcd318a95ab1a929644d769e26b"} Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.574672 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.608787 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-64wfv"] Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.619540 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-64wfv"] Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.757768 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host\") pod \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.757997 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf8gc\" (UniqueName: \"kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc\") pod \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\" (UID: \"3fbb90d2-aa7a-4086-808c-ca6d6c07fed2\") " Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.759780 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host" (OuterVolumeSpecName: "host") pod "3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" (UID: "3fbb90d2-aa7a-4086-808c-ca6d6c07fed2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.768449 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc" (OuterVolumeSpecName: "kube-api-access-bf8gc") pod "3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" (UID: "3fbb90d2-aa7a-4086-808c-ca6d6c07fed2"). InnerVolumeSpecName "kube-api-access-bf8gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.801312 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9320c1f91f88f61a646ddadc42e25e8742b03b6e69254dcb51bcdcbd0cb2e6e3" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.801418 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-64wfv" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.860207 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf8gc\" (UniqueName: \"kubernetes.io/projected/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-kube-api-access-bf8gc\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:38 crc kubenswrapper[4982]: I1205 20:20:38.860237 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2-host\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.404750 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" path="/var/lib/kubelet/pods/3fbb90d2-aa7a-4086-808c-ca6d6c07fed2/volumes" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.800920 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-865ml"] Dec 05 20:20:39 crc kubenswrapper[4982]: E1205 20:20:39.802387 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" containerName="container-00" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.802486 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" containerName="container-00" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.802792 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fbb90d2-aa7a-4086-808c-ca6d6c07fed2" containerName="container-00" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.804919 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.979793 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcw26\" (UniqueName: \"kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:39 crc kubenswrapper[4982]: I1205 20:20:39.980029 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.081785 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcw26\" (UniqueName: \"kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.081925 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.082011 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: 
\"kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.443287 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcw26\" (UniqueName: \"kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26\") pod \"crc-debug-865ml\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.457877 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.831769 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-865ml" event={"ID":"98548cab-e999-405b-a615-a7d0a59c7845","Type":"ContainerStarted","Data":"9c20500332d6578c2b96f1e407a8c2d13ab4c79b1a1a8c5dedde0b519b7443f1"} Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.832068 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-865ml" event={"ID":"98548cab-e999-405b-a615-a7d0a59c7845","Type":"ContainerStarted","Data":"6007f6c5c233bd4e733e67420b1486a653bceeaf742433722b6850357f65b92f"} Dec 05 20:20:40 crc kubenswrapper[4982]: I1205 20:20:40.853013 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wkr2f/crc-debug-865ml" podStartSLOduration=1.852997547 podStartE2EDuration="1.852997547s" podCreationTimestamp="2025-12-05 20:20:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 20:20:40.844283813 +0000 UTC m=+4019.726169808" watchObservedRunningTime="2025-12-05 20:20:40.852997547 +0000 UTC m=+4019.734883542" Dec 05 20:20:41 crc kubenswrapper[4982]: I1205 20:20:41.841899 4982 generic.go:334] "Generic (PLEG): container finished" podID="98548cab-e999-405b-a615-a7d0a59c7845" containerID="9c20500332d6578c2b96f1e407a8c2d13ab4c79b1a1a8c5dedde0b519b7443f1" exitCode=0 Dec 05 20:20:41 crc kubenswrapper[4982]: I1205 20:20:41.842059 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-865ml" event={"ID":"98548cab-e999-405b-a615-a7d0a59c7845","Type":"ContainerDied","Data":"9c20500332d6578c2b96f1e407a8c2d13ab4c79b1a1a8c5dedde0b519b7443f1"} Dec 05 20:20:42 crc kubenswrapper[4982]: I1205 20:20:42.963611 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.006072 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-865ml"] Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.015948 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-865ml"] Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.140102 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host\") pod \"98548cab-e999-405b-a615-a7d0a59c7845\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.140489 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vcw26\" (UniqueName: \"kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26\") pod \"98548cab-e999-405b-a615-a7d0a59c7845\" (UID: \"98548cab-e999-405b-a615-a7d0a59c7845\") " Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.140235 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host" (OuterVolumeSpecName: "host") pod "98548cab-e999-405b-a615-a7d0a59c7845" (UID: "98548cab-e999-405b-a615-a7d0a59c7845"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.140951 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/98548cab-e999-405b-a615-a7d0a59c7845-host\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.151809 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26" (OuterVolumeSpecName: "kube-api-access-vcw26") pod "98548cab-e999-405b-a615-a7d0a59c7845" (UID: "98548cab-e999-405b-a615-a7d0a59c7845"). InnerVolumeSpecName "kube-api-access-vcw26". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.242332 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vcw26\" (UniqueName: \"kubernetes.io/projected/98548cab-e999-405b-a615-a7d0a59c7845-kube-api-access-vcw26\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.400374 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98548cab-e999-405b-a615-a7d0a59c7845" path="/var/lib/kubelet/pods/98548cab-e999-405b-a615-a7d0a59c7845/volumes" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.862841 4982 scope.go:117] "RemoveContainer" containerID="9c20500332d6578c2b96f1e407a8c2d13ab4c79b1a1a8c5dedde0b519b7443f1" Dec 05 20:20:43 crc kubenswrapper[4982]: I1205 20:20:43.862901 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-865ml" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.169665 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-hjkv9"] Dec 05 20:20:44 crc kubenswrapper[4982]: E1205 20:20:44.171102 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98548cab-e999-405b-a615-a7d0a59c7845" containerName="container-00" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.171135 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="98548cab-e999-405b-a615-a7d0a59c7845" containerName="container-00" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.171422 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="98548cab-e999-405b-a615-a7d0a59c7845" containerName="container-00" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.172262 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.362905 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhpfh\" (UniqueName: \"kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.363028 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.465163 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.465294 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhpfh\" (UniqueName: \"kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.465663 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.488257 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhpfh\" (UniqueName: \"kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh\") pod \"crc-debug-hjkv9\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.491832 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:44 crc kubenswrapper[4982]: W1205 20:20:44.516838 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7df0020_02d9_4fc0_9402_1d95c6c27ada.slice/crio-cfbf79d1d4477806f295d4b9e0acc54867902c53b7445214af4da5d5c7baab63 WatchSource:0}: Error finding container cfbf79d1d4477806f295d4b9e0acc54867902c53b7445214af4da5d5c7baab63: Status 404 returned error can't find the container with id cfbf79d1d4477806f295d4b9e0acc54867902c53b7445214af4da5d5c7baab63 Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.873276 4982 generic.go:334] "Generic (PLEG): container finished" podID="c7df0020-02d9-4fc0-9402-1d95c6c27ada" containerID="83dde69e83609978b29e3f9af9de762fa03679d52018902eb95a7f6d6870eae1" exitCode=0 Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.873340 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" event={"ID":"c7df0020-02d9-4fc0-9402-1d95c6c27ada","Type":"ContainerDied","Data":"83dde69e83609978b29e3f9af9de762fa03679d52018902eb95a7f6d6870eae1"} Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.873624 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" event={"ID":"c7df0020-02d9-4fc0-9402-1d95c6c27ada","Type":"ContainerStarted","Data":"cfbf79d1d4477806f295d4b9e0acc54867902c53b7445214af4da5d5c7baab63"} Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.917573 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-hjkv9"] Dec 05 20:20:44 crc kubenswrapper[4982]: I1205 20:20:44.928179 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wkr2f/crc-debug-hjkv9"] Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.015434 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.200655 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host\") pod \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.200815 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host" (OuterVolumeSpecName: "host") pod "c7df0020-02d9-4fc0-9402-1d95c6c27ada" (UID: "c7df0020-02d9-4fc0-9402-1d95c6c27ada"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.200961 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mhpfh\" (UniqueName: \"kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh\") pod \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\" (UID: \"c7df0020-02d9-4fc0-9402-1d95c6c27ada\") " Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.202999 4982 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c7df0020-02d9-4fc0-9402-1d95c6c27ada-host\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.207448 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh" (OuterVolumeSpecName: "kube-api-access-mhpfh") pod "c7df0020-02d9-4fc0-9402-1d95c6c27ada" (UID: "c7df0020-02d9-4fc0-9402-1d95c6c27ada"). InnerVolumeSpecName "kube-api-access-mhpfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.305077 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mhpfh\" (UniqueName: \"kubernetes.io/projected/c7df0020-02d9-4fc0-9402-1d95c6c27ada-kube-api-access-mhpfh\") on node \"crc\" DevicePath \"\"" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.897202 4982 scope.go:117] "RemoveContainer" containerID="83dde69e83609978b29e3f9af9de762fa03679d52018902eb95a7f6d6870eae1" Dec 05 20:20:46 crc kubenswrapper[4982]: I1205 20:20:46.897220 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/crc-debug-hjkv9" Dec 05 20:20:47 crc kubenswrapper[4982]: I1205 20:20:47.404311 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7df0020-02d9-4fc0-9402-1d95c6c27ada" path="/var/lib/kubelet/pods/c7df0020-02d9-4fc0-9402-1d95c6c27ada/volumes" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.125366 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/init-config-reloader/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.351231 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/init-config-reloader/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.421298 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/alertmanager/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.454994 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_4e3679d1-2b65-494e-bc5f-2a68697da816/config-reloader/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.542111 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6fbc9dfdf4-kq8zv_cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb/barbican-api/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.618506 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6fbc9dfdf4-kq8zv_cb3f27aa-6bbf-4027-aa22-cbd6a319c3cb/barbican-api-log/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.664290 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_barbican-keystone-listener-d7dcb9f9d-rc2ld_f129356d-d3c2-4fc0-856e-2310b4c29996/barbican-keystone-listener/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.893750 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d7dcb9f9d-rc2ld_f129356d-d3c2-4fc0-856e-2310b4c29996/barbican-keystone-listener-log/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.912098 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-d99845895-c6s44_3eb20c0f-b133-4aab-a43a-22dab1ae0630/barbican-worker/0.log" Dec 05 20:21:31 crc kubenswrapper[4982]: I1205 20:21:31.975548 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-d99845895-c6s44_3eb20c0f-b133-4aab-a43a-22dab1ae0630/barbican-worker-log/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.036127 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-6lcbt_3cd634f3-b987-404a-a10b-609341e2b548/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.126440 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/ceilometer-central-agent/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.242953 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/ceilometer-notification-agent/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.335931 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/sg-core/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.342248 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_fdfbdc81-b68b-414e-af43-2f44719ca203/proxy-httpd/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.776373 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_53a46b29-95f9-43a2-8d2a-770693317314/cinder-api/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.818667 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_53a46b29-95f9-43a2-8d2a-770693317314/cinder-api-log/0.log" Dec 05 20:21:33 crc kubenswrapper[4982]: I1205 20:21:33.989399 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_90a28c6e-ad50-4f47-900b-f35bc06060a3/probe/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.068598 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_90a28c6e-ad50-4f47-900b-f35bc06060a3/cinder-scheduler/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.119958 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_a30af9ef-11ee-4919-8ca8-2ba7d588264b/cloudkitty-api/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.143863 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_a30af9ef-11ee-4919-8ca8-2ba7d588264b/cloudkitty-api-log/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.254364 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_892ec5b5-4495-4ef9-ae57-7e3c535e11ca/loki-compactor/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.358628 4982 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-xvg29_97f36210-9f01-4ba6-95e4-0aea23aefbb3/loki-distributor/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.571275 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-xcgh9_b670abee-8f29-4979-8c53-5226b58a0141/gateway/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.609833 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-jgdkk_25283efb-caa6-418a-8228-f3dcf1802be2/gateway/0.log" Dec 05 20:21:34 crc kubenswrapper[4982]: I1205 20:21:34.686423 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_8c96cc97-f375-489f-9168-bac3695b309a/loki-index-gateway/0.log" Dec 05 20:21:35 crc kubenswrapper[4982]: I1205 20:21:35.220920 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_85a8d068-da43-4ed2-879a-281872eab097/loki-ingester/0.log" Dec 05 20:21:35 crc kubenswrapper[4982]: I1205 20:21:35.285285 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-rpz4z_4e468d19-dc16-452b-b3c8-cd5df67c4748/loki-query-frontend/0.log" Dec 05 20:21:35 crc kubenswrapper[4982]: I1205 20:21:35.494318 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-kjbw4_27e277b5-5b4e-4d77-afbd-1b7c2d53918e/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:35 crc kubenswrapper[4982]: I1205 20:21:35.762317 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-2mtkl_9817cc48-c666-468c-a9cf-327fa1898ad9/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:35 crc kubenswrapper[4982]: I1205 20:21:35.822338 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/init/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.086051 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/init/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.213241 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-jn7l9_a33770ab-0040-4eb9-92c5-7c25cb66fa33/dnsmasq-dns/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.329732 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-lgkzq_2d9ae0b1-8af5-4522-af29-d67b2c829ca0/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.454230 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_de9f5462-cedf-4860-9b59-bac07091738f/glance-httpd/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.499503 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-zrl2q_39dc27e3-5788-40fd-b186-9c91aa5618eb/loki-querier/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.539594 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_de9f5462-cedf-4860-9b59-bac07091738f/glance-log/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.896386 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_36af0f53-11a4-47a8-9361-acde52280271/glance-log/0.log" Dec 05 20:21:36 crc kubenswrapper[4982]: I1205 20:21:36.984326 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_36af0f53-11a4-47a8-9361-acde52280271/glance-httpd/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.156972 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-wmk6q_ddbe0e8f-d183-4f24-a7cf-221b3221cb27/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.311765 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-vlqtj_f9b734cf-d38f-487a-a4a5-1a0a0b10f9cb/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.567807 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29416081-4j7pq_76cb36b1-d2a2-4742-8886-fc3305fb4082/keystone-cron/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.748121 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_babd6ca2-04d1-4f51-aaa6-d621a339d799/kube-state-metrics/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.835411 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-768c967fc5-mm4fv_87f55498-f78b-4201-9970-b393206ddabf/keystone-api/0.log" Dec 05 20:21:37 crc kubenswrapper[4982]: I1205 20:21:37.946293 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-lbxct_8f1fe72c-1893-4aa4-9fc9-5ab862de7c35/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:38 crc kubenswrapper[4982]: I1205 20:21:38.422564 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-67fdf46f87-pnjnc_4afaca8a-acd1-480f-a132-33155fb3b1b0/neutron-httpd/0.log" Dec 05 20:21:38 crc kubenswrapper[4982]: I1205 20:21:38.478542 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-67fdf46f87-pnjnc_4afaca8a-acd1-480f-a132-33155fb3b1b0/neutron-api/0.log" Dec 05 20:21:38 crc kubenswrapper[4982]: I1205 20:21:38.645748 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-jhdww_6ff78dfa-4b95-4e32-b569-08d967824332/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:39 crc kubenswrapper[4982]: I1205 20:21:39.165225 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_eaf71bf8-6e74-4fec-a151-372e1044b69f/nova-api-log/0.log" Dec 05 20:21:39 crc kubenswrapper[4982]: I1205 20:21:39.483110 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_c7fa7720-ccc7-4ddf-8102-e10818187b20/nova-cell0-conductor-conductor/0.log" Dec 05 20:21:39 crc kubenswrapper[4982]: I1205 20:21:39.525503 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_eaf71bf8-6e74-4fec-a151-372e1044b69f/nova-api-api/0.log" Dec 05 20:21:39 crc kubenswrapper[4982]: I1205 20:21:39.869411 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-conductor-0_b1112e38-5f73-4a31-8f6d-9b03a9148c02/nova-cell1-conductor-conductor/0.log" Dec 05 20:21:39 crc kubenswrapper[4982]: I1205 20:21:39.964348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_592eec2a-b340-4c42-8b06-ab477b4aecfa/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 20:21:40 crc kubenswrapper[4982]: I1205 20:21:40.364218 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-qxxl7_592205cb-46f4-4bc6-9329-a90e5e63400e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:40 crc kubenswrapper[4982]: I1205 20:21:40.920981 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_541f0bcc-c2ab-4e67-bc9a-b45a2dc5747d/cloudkitty-proc/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.046355 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a196cf68-e14d-4171-90f5-a266a8313f72/nova-metadata-log/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.364886 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/mysql-bootstrap/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.388003 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_c2cfe605-218c-442d-b88b-8f2d7b3a6ba0/nova-scheduler-scheduler/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.648412 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/mysql-bootstrap/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.702499 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_af72a355-0521-4724-8224-c7fd9046b4d6/galera/0.log" Dec 05 20:21:41 crc kubenswrapper[4982]: I1205 20:21:41.849864 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/mysql-bootstrap/0.log" Dec 05 20:21:42 crc kubenswrapper[4982]: I1205 20:21:42.398760 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a196cf68-e14d-4171-90f5-a266a8313f72/nova-metadata-metadata/0.log" Dec 05 20:21:42 crc kubenswrapper[4982]: I1205 20:21:42.726768 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/mysql-bootstrap/0.log" Dec 05 20:21:42 crc kubenswrapper[4982]: I1205 20:21:42.871554 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_0a83cc40-0ef8-4fe4-ba74-feb878cf0ac2/openstackclient/0.log" Dec 05 20:21:42 crc kubenswrapper[4982]: I1205 20:21:42.912048 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_b6542514-631f-4a81-aba8-11cfebd33048/galera/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.077676 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-jhsjf_c4d66436-88ae-4023-9601-bd2aa6954667/ovn-controller/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.106696 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-df27x_c19a072d-5061-4c2e-a817-8708ec746095/openstack-network-exporter/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.286218 4982 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server-init/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.539056 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovs-vswitchd/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.572771 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server-init/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.628428 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-sct9d_d452a876-70e0-416c-ab4d-667b53e8f86e/ovsdb-server/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.879313 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-vgqgj_19ae7f76-4c93-41fe-9ac4-aead0ad360fe/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.928950 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e6b8e1da-6aa2-4556-a427-35c1f9920482/ovn-northd/0.log" Dec 05 20:21:43 crc kubenswrapper[4982]: I1205 20:21:43.958712 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_e6b8e1da-6aa2-4556-a427-35c1f9920482/openstack-network-exporter/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.123907 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7d0f7663-0afb-41dc-bae8-7efdafbf2ed2/openstack-network-exporter/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.170891 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_7d0f7663-0afb-41dc-bae8-7efdafbf2ed2/ovsdbserver-nb/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.539795 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0/ovsdbserver-sb/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.647142 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_e44e82e2-3dc9-4a76-bb30-f6b2bdfcfec0/openstack-network-exporter/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.763737 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59b8477896-ww7nl_c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235/placement-api/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.870594 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59b8477896-ww7nl_c6bf0e9e-d357-4e82-8aa9-06ad2b0e5235/placement-log/0.log" Dec 05 20:21:44 crc kubenswrapper[4982]: I1205 20:21:44.921543 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/init-config-reloader/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.175444 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/thanos-sidecar/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.178909 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/prometheus/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 
20:21:45.196413 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/config-reloader/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.233781 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_a65aa27d-1e48-4991-a573-68e9458e1733/init-config-reloader/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.451654 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/setup-container/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.731761 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/setup-container/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.799767 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/setup-container/0.log" Dec 05 20:21:45 crc kubenswrapper[4982]: I1205 20:21:45.823130 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_e38c99e0-3c00-4474-9a4e-b388a5630685/rabbitmq/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.113880 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/rabbitmq/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.125104 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_d9c1e005-1e95-440f-be18-77dbe6a757db/setup-container/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.182238 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-cf864_c22ac692-8285-4e89-8c2b-28b2bc125fa3/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.454870 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-lk6dp_5eedd2f2-bb50-4da7-846d-000d03e17934/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.465367 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-ljklg_1458b955-6299-4a91-a904-4146c620e208/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.642013 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-x59ld_96a0a8ec-a47f-4b4e-abf9-53e0f6a30c09/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:46 crc kubenswrapper[4982]: I1205 20:21:46.716355 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-5zqdl_1f38261e-9d6a-4983-873c-7e7cf37ebb81/ssh-known-hosts-edpm-deployment/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.002062 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-fc787d555-k2pps_4ae7179d-f311-4080-9409-b5315377edea/proxy-httpd/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.003793 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-fc787d555-k2pps_4ae7179d-f311-4080-9409-b5315377edea/proxy-server/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 
20:21:47.297237 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-auditor/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.308696 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-reaper/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.315639 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-hrhl7_155eecea-ebae-400d-a81e-1d28392b290e/swift-ring-rebalance/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.528575 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-replicator/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.563350 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-replicator/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.579643 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/account-server/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.588858 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-auditor/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.720880 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-server/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.776831 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-auditor/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.783760 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/container-updater/0.log" Dec 05 20:21:47 crc kubenswrapper[4982]: I1205 20:21:47.857347 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-expirer/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.187308 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-replicator/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.247789 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-updater/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.249014 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/object-server/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.333294 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/rsync/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.502686 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_03bef3aa-7dbf-41c2-8754-7be39af98913/swift-recon-cron/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.520311 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-mk8fc_f9f55ee0-0c0c-4edf-9b1f-17ff56560708/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.706813 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_f60aebec-8123-4265-b873-fba9eb0f911b/test-operator-logs-container/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.751770 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_d3c6fe00-7794-450c-a588-bd1e2afdbe8c/tempest-tests-tempest-tests-runner/0.log" Dec 05 20:21:48 crc kubenswrapper[4982]: I1205 20:21:48.999448 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-b56qn_d26e25b0-3708-4c24-9034-36a8ab878465/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 20:21:49 crc kubenswrapper[4982]: I1205 20:21:49.864246 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:21:49 crc kubenswrapper[4982]: E1205 20:21:49.865026 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7df0020-02d9-4fc0-9402-1d95c6c27ada" containerName="container-00" Dec 05 20:21:49 crc kubenswrapper[4982]: I1205 20:21:49.865042 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7df0020-02d9-4fc0-9402-1d95c6c27ada" containerName="container-00" Dec 05 20:21:49 crc kubenswrapper[4982]: I1205 20:21:49.865339 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7df0020-02d9-4fc0-9402-1d95c6c27ada" containerName="container-00" Dec 05 20:21:49 crc kubenswrapper[4982]: I1205 20:21:49.867116 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:49 crc kubenswrapper[4982]: I1205 20:21:49.890530 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.041351 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r99b\" (UniqueName: \"kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.041453 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.041771 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.143173 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r99b\" (UniqueName: \"kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.143448 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.143604 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.144037 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.144189 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.161245 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-9r99b\" (UniqueName: \"kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b\") pod \"redhat-operators-592ld\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.216469 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.825408 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:21:50 crc kubenswrapper[4982]: I1205 20:21:50.923406 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerStarted","Data":"bf6dba51d03c250529d9ac66414fc4dea7a65b74a0fc279b071b25c86f026c71"} Dec 05 20:21:51 crc kubenswrapper[4982]: I1205 20:21:51.938287 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerID="842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f" exitCode=0 Dec 05 20:21:51 crc kubenswrapper[4982]: I1205 20:21:51.938632 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerDied","Data":"842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f"} Dec 05 20:21:52 crc kubenswrapper[4982]: I1205 20:21:52.523218 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_34fad3bb-6720-4219-8862-08492842062a/memcached/0.log" Dec 05 20:21:53 crc kubenswrapper[4982]: I1205 20:21:53.962040 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerStarted","Data":"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4"} Dec 05 20:21:55 crc kubenswrapper[4982]: I1205 20:21:55.981825 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerID="a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4" exitCode=0 Dec 05 20:21:55 crc kubenswrapper[4982]: I1205 20:21:55.981921 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerDied","Data":"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4"} Dec 05 20:21:56 crc kubenswrapper[4982]: I1205 20:21:56.994269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerStarted","Data":"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79"} Dec 05 20:21:57 crc kubenswrapper[4982]: I1205 20:21:57.022874 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-592ld" podStartSLOduration=3.575721657 podStartE2EDuration="8.022842581s" podCreationTimestamp="2025-12-05 20:21:49 +0000 UTC" firstStartedPulling="2025-12-05 20:21:51.942855293 +0000 UTC m=+4090.824741288" lastFinishedPulling="2025-12-05 20:21:56.389976217 +0000 UTC m=+4095.271862212" observedRunningTime="2025-12-05 20:21:57.017495809 +0000 UTC m=+4095.899381814" watchObservedRunningTime="2025-12-05 20:21:57.022842581 +0000 UTC 
m=+4095.904728576" Dec 05 20:22:00 crc kubenswrapper[4982]: I1205 20:22:00.216846 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:00 crc kubenswrapper[4982]: I1205 20:22:00.217398 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:01 crc kubenswrapper[4982]: I1205 20:22:01.479818 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-592ld" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="registry-server" probeResult="failure" output=< Dec 05 20:22:01 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 20:22:01 crc kubenswrapper[4982]: > Dec 05 20:22:10 crc kubenswrapper[4982]: I1205 20:22:10.276013 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:10 crc kubenswrapper[4982]: I1205 20:22:10.350471 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:10 crc kubenswrapper[4982]: I1205 20:22:10.511909 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.123326 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-592ld" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="registry-server" containerID="cri-o://6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79" gracePeriod=2 Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.800031 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.964675 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r99b\" (UniqueName: \"kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b\") pod \"d4906847-1446-4d9c-87e1-70ca56f17c89\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.965327 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities\") pod \"d4906847-1446-4d9c-87e1-70ca56f17c89\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.965432 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content\") pod \"d4906847-1446-4d9c-87e1-70ca56f17c89\" (UID: \"d4906847-1446-4d9c-87e1-70ca56f17c89\") " Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.966740 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities" (OuterVolumeSpecName: "utilities") pod "d4906847-1446-4d9c-87e1-70ca56f17c89" (UID: "d4906847-1446-4d9c-87e1-70ca56f17c89"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:22:12 crc kubenswrapper[4982]: I1205 20:22:12.986325 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b" (OuterVolumeSpecName: "kube-api-access-9r99b") pod "d4906847-1446-4d9c-87e1-70ca56f17c89" (UID: "d4906847-1446-4d9c-87e1-70ca56f17c89"). InnerVolumeSpecName "kube-api-access-9r99b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.068757 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r99b\" (UniqueName: \"kubernetes.io/projected/d4906847-1446-4d9c-87e1-70ca56f17c89-kube-api-access-9r99b\") on node \"crc\" DevicePath \"\"" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.068813 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.087103 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4906847-1446-4d9c-87e1-70ca56f17c89" (UID: "d4906847-1446-4d9c-87e1-70ca56f17c89"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.142617 4982 generic.go:334] "Generic (PLEG): container finished" podID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerID="6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79" exitCode=0 Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.142654 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerDied","Data":"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79"} Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.142678 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-592ld" event={"ID":"d4906847-1446-4d9c-87e1-70ca56f17c89","Type":"ContainerDied","Data":"bf6dba51d03c250529d9ac66414fc4dea7a65b74a0fc279b071b25c86f026c71"} Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.142695 4982 scope.go:117] "RemoveContainer" containerID="6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.142690 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-592ld" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.161971 4982 scope.go:117] "RemoveContainer" containerID="a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.171075 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4906847-1446-4d9c-87e1-70ca56f17c89-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.186852 4982 scope.go:117] "RemoveContainer" containerID="842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.188818 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.200862 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-592ld"] Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.228361 4982 scope.go:117] "RemoveContainer" containerID="6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79" Dec 05 20:22:13 crc kubenswrapper[4982]: E1205 20:22:13.228843 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79\": container with ID starting with 6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79 not found: ID does not exist" containerID="6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.228882 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79"} err="failed to get container status \"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79\": rpc error: code = NotFound desc = could not find container \"6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79\": container with ID starting with 6ef0b9159c5372c57f6e4c3ee253d5f2ff1386c7df683406157b101287e49e79 not found: ID does not exist" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.228912 4982 scope.go:117] "RemoveContainer" containerID="a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4" Dec 05 20:22:13 crc kubenswrapper[4982]: E1205 20:22:13.230800 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4\": container with ID starting with a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4 not found: ID does not exist" containerID="a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.230850 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4"} err="failed to get container status \"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4\": rpc error: code = NotFound desc = could not find container \"a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4\": container with ID starting with a48a8a7e88e2233681cf7e11d19f17ef7a533b67008910e3b757760786cceea4 not found: ID does not exist" Dec 05 20:22:13 crc 
kubenswrapper[4982]: I1205 20:22:13.230891 4982 scope.go:117] "RemoveContainer" containerID="842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f" Dec 05 20:22:13 crc kubenswrapper[4982]: E1205 20:22:13.231272 4982 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f\": container with ID starting with 842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f not found: ID does not exist" containerID="842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.231304 4982 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f"} err="failed to get container status \"842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f\": rpc error: code = NotFound desc = could not find container \"842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f\": container with ID starting with 842605eb42a144cf2295a2b5445f32643eaa53a531c48a934ded7f1d835c9e8f not found: ID does not exist" Dec 05 20:22:13 crc kubenswrapper[4982]: I1205 20:22:13.411723 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" path="/var/lib/kubelet/pods/d4906847-1446-4d9c-87e1-70ca56f17c89/volumes" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.196953 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.384342 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.440718 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.440747 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.626565 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/util/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.643762 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/extract/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.681602 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_a3f18066751728ea411490ef42cbef94f14d4873dcda16f8427ea43c4brb4jn_d15a633b-7ce9-4676-9431-f5e40ec5a019/pull/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.837260 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f8852_ebffce09-7b77-4c54-9c5a-520517cc3aa8/kube-rbac-proxy/0.log" Dec 
05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.923839 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-f8852_ebffce09-7b77-4c54-9c5a-520517cc3aa8/manager/0.log" Dec 05 20:22:19 crc kubenswrapper[4982]: I1205 20:22:19.965236 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wrfxk_b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a/kube-rbac-proxy/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.129993 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-t2zqn_55a262ed-8b7c-4e28-af82-89c5df1f675b/kube-rbac-proxy/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.131762 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-wrfxk_b2aa8072-1a71-4ec3-aab7-38bfa55a7b9a/manager/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.242527 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-t2zqn_55a262ed-8b7c-4e28-af82-89c5df1f675b/manager/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.362697 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-p6gnj_517bb39b-5710-45f0-b70a-694dc5b4d044/kube-rbac-proxy/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.463977 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-p6gnj_517bb39b-5710-45f0-b70a-694dc5b4d044/manager/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.574048 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rvzdw_511305c1-7bff-43ce-b398-e5aec02fa9ec/manager/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.632672 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-rvzdw_511305c1-7bff-43ce-b398-e5aec02fa9ec/kube-rbac-proxy/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.773227 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrjv5_25671f72-e601-41d8-9617-fb9c436e7959/manager/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.779562 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-rrjv5_25671f72-e601-41d8-9617-fb9c436e7959/kube-rbac-proxy/0.log" Dec 05 20:22:20 crc kubenswrapper[4982]: I1205 20:22:20.939289 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fnkqx_ab5cdcbf-c82f-48be-a97d-65a856e95bd9/kube-rbac-proxy/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.118729 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-ms8zf_5cd3d875-b57b-4b61-ac66-17035d351f35/kube-rbac-proxy/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.141316 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-fnkqx_ab5cdcbf-c82f-48be-a97d-65a856e95bd9/manager/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.163819 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-ms8zf_5cd3d875-b57b-4b61-ac66-17035d351f35/manager/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.278317 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-w8qcr_701bf927-7bac-49a4-9435-a68ebd3ff8c4/kube-rbac-proxy/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.396972 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-w8qcr_701bf927-7bac-49a4-9435-a68ebd3ff8c4/manager/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.486761 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-ctg6p_d30f627a-9e43-4435-aaf3-31a0631bfcba/kube-rbac-proxy/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.596459 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-ctg6p_d30f627a-9e43-4435-aaf3-31a0631bfcba/manager/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.649072 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-472kw_d4adcb8f-8951-4c59-adf3-e94b1a5e202b/kube-rbac-proxy/0.log" Dec 05 20:22:21 crc kubenswrapper[4982]: I1205 20:22:21.740725 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-472kw_d4adcb8f-8951-4c59-adf3-e94b1a5e202b/manager/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.222755 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7rqsz_b4d06e27-b91f-4602-b327-3435d8977280/kube-rbac-proxy/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.315103 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-7rqsz_b4d06e27-b91f-4602-b327-3435d8977280/manager/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.345482 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7hbtv_2e0bfce4-bfd5-49ae-858f-647f5f8a919e/kube-rbac-proxy/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.502124 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-7hbtv_2e0bfce4-bfd5-49ae-858f-647f5f8a919e/manager/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.526397 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-d6svn_983b81df-c036-4f75-8d49-259f09235991/kube-rbac-proxy/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.588556 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-d6svn_983b81df-c036-4f75-8d49-259f09235991/manager/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.738376 4982 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl_c286a8e0-15ff-4705-a03f-bca226144360/manager/0.log" Dec 05 20:22:22 crc kubenswrapper[4982]: I1205 20:22:22.752987 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd44jhwl_c286a8e0-15ff-4705-a03f-bca226144360/kube-rbac-proxy/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.061203 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-2f2gp_c5e545c3-45c4-4c76-a6cf-e45385919b9d/registry-server/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.258661 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-79fbdbdfdc-dt7k6_5619a079-821c-49cb-84f5-136f41ff45a5/operator/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.351219 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xww76_a964acf3-fa80-4561-86da-c831a10fc58e/kube-rbac-proxy/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.449935 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-xww76_a964acf3-fa80-4561-86da-c831a10fc58e/manager/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.581656 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kzpx6_c959458c-8a9a-4d37-889a-577a673e5305/kube-rbac-proxy/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.639339 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-kzpx6_c959458c-8a9a-4d37-889a-577a673e5305/manager/0.log" Dec 05 20:22:23 crc kubenswrapper[4982]: I1205 20:22:23.755505 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-wst8r_1f84d21b-6ce8-4c97-a104-cb308ce8527d/operator/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.271421 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-54bf4fb767-47tg5_eb889ad3-88cd-45b4-9b56-13d3181ba3e6/manager/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.401902 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8ttpr_167e755e-d998-47ca-88dd-0bc17c975864/kube-rbac-proxy/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.450091 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8ttpr_167e755e-d998-47ca-88dd-0bc17c975864/manager/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.462973 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6b4849bfff-skwf7_7b1c4531-0231-42d4-94e9-0a211394dfa6/kube-rbac-proxy/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.623933 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d48dh_26fa2fe6-70bb-4a70-8bee-b0cde872beb1/kube-rbac-proxy/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.700619 4982 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-d48dh_26fa2fe6-70bb-4a70-8bee-b0cde872beb1/manager/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.854685 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-829rq_cf51df5f-9adb-4929-9a00-6bfeafdfa069/kube-rbac-proxy/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.919728 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-6b4849bfff-skwf7_7b1c4531-0231-42d4-94e9-0a211394dfa6/manager/0.log" Dec 05 20:22:24 crc kubenswrapper[4982]: I1205 20:22:24.939419 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-829rq_cf51df5f-9adb-4929-9a00-6bfeafdfa069/manager/0.log" Dec 05 20:22:42 crc kubenswrapper[4982]: I1205 20:22:42.557372 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:22:42 crc kubenswrapper[4982]: I1205 20:22:42.557776 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:22:45 crc kubenswrapper[4982]: I1205 20:22:45.457381 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-xctm7_72f8774d-c2a9-4489-9812-2b72525fe9d9/control-plane-machine-set-operator/0.log" Dec 05 20:22:45 crc kubenswrapper[4982]: I1205 20:22:45.831980 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bh4fj_8d4c7ce2-7724-494b-b86a-23627074ce45/kube-rbac-proxy/0.log" Dec 05 20:22:45 crc kubenswrapper[4982]: I1205 20:22:45.920617 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bh4fj_8d4c7ce2-7724-494b-b86a-23627074ce45/machine-api-operator/0.log" Dec 05 20:22:59 crc kubenswrapper[4982]: I1205 20:22:59.861881 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-skf6w_f1ca36be-fe08-4f98-be99-35f9e8265a79/cert-manager-controller/0.log" Dec 05 20:23:00 crc kubenswrapper[4982]: I1205 20:23:00.020331 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-sxnb8_9825df92-fee9-4a92-b324-62162aacc6fe/cert-manager-cainjector/0.log" Dec 05 20:23:00 crc kubenswrapper[4982]: I1205 20:23:00.048673 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-wtdnn_f52e413e-741e-4b30-b1d7-e687e31c16e5/cert-manager-webhook/0.log" Dec 05 20:23:12 crc kubenswrapper[4982]: I1205 20:23:12.557054 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= 
Dec 05 20:23:12 crc kubenswrapper[4982]: I1205 20:23:12.557561 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:23:15 crc kubenswrapper[4982]: I1205 20:23:15.522033 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-2zqs2_47097c85-dd76-46fd-b837-c5f9e2f5b599/nmstate-console-plugin/0.log" Dec 05 20:23:15 crc kubenswrapper[4982]: I1205 20:23:15.769322 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-l8vt9_d69403b9-dbd1-4059-afca-e7a907dd2c08/kube-rbac-proxy/0.log" Dec 05 20:23:15 crc kubenswrapper[4982]: I1205 20:23:15.780107 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-4dnv7_38b59b38-39e3-4a09-b50e-0cfa9035cd3f/nmstate-handler/0.log" Dec 05 20:23:15 crc kubenswrapper[4982]: I1205 20:23:15.848994 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-l8vt9_d69403b9-dbd1-4059-afca-e7a907dd2c08/nmstate-metrics/0.log" Dec 05 20:23:15 crc kubenswrapper[4982]: I1205 20:23:15.978553 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-s9fkp_0a9ab7cb-3dc4-4b37-9bc0-a1bdf690306d/nmstate-operator/0.log" Dec 05 20:23:16 crc kubenswrapper[4982]: I1205 20:23:16.062434 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-lrtmh_085cb474-3881-41ed-b0fa-6a3d237ec343/nmstate-webhook/0.log" Dec 05 20:23:30 crc kubenswrapper[4982]: I1205 20:23:30.996281 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/kube-rbac-proxy/0.log" Dec 05 20:23:31 crc kubenswrapper[4982]: I1205 20:23:31.058851 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/manager/0.log" Dec 05 20:23:42 crc kubenswrapper[4982]: I1205 20:23:42.557019 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:23:42 crc kubenswrapper[4982]: I1205 20:23:42.557693 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:23:42 crc kubenswrapper[4982]: I1205 20:23:42.557758 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" Dec 05 20:23:42 crc kubenswrapper[4982]: I1205 20:23:42.558568 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 20:23:42 crc kubenswrapper[4982]: I1205 20:23:42.558655 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d" gracePeriod=600 Dec 05 20:23:43 crc kubenswrapper[4982]: I1205 20:23:43.073235 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d" exitCode=0 Dec 05 20:23:43 crc kubenswrapper[4982]: I1205 20:23:43.073333 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d"} Dec 05 20:23:43 crc kubenswrapper[4982]: I1205 20:23:43.073625 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerStarted","Data":"fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1"} Dec 05 20:23:43 crc kubenswrapper[4982]: I1205 20:23:43.073656 4982 scope.go:117] "RemoveContainer" containerID="8a2ec92be92962086c84ac98c8bf45bf82fa7db666856949047b60983d1a077c" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.322714 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-f2lxm_e5934f26-82dc-4376-9073-0d24a57aadb1/kube-rbac-proxy/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.408208 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-f2lxm_e5934f26-82dc-4376-9073-0d24a57aadb1/controller/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.681128 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.883233 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.883791 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.918090 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:23:47 crc kubenswrapper[4982]: I1205 20:23:47.971760 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.171509 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 
20:23:48.171906 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.174061 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.180474 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.373934 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-reloader/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.419279 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-metrics/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.420348 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/cp-frr-files/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.421773 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/controller/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.615160 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/kube-rbac-proxy-frr/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.663919 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/kube-rbac-proxy/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.676820 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/frr-metrics/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.875615 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-t9drp_8ad46eb3-de5d-4122-82fe-5cf11faf01bc/frr-k8s-webhook-server/0.log" Dec 05 20:23:48 crc kubenswrapper[4982]: I1205 20:23:48.876338 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/reloader/0.log" Dec 05 20:23:49 crc kubenswrapper[4982]: I1205 20:23:49.247324 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7d4d6d9964-7tmb4_4ba9f951-374b-45ba-a5d1-de7393862f1d/manager/0.log" Dec 05 20:23:49 crc kubenswrapper[4982]: I1205 20:23:49.480489 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-8ddfdf549-2zbg6_858ccc6d-475e-4636-b597-c155973b2e85/webhook-server/0.log" Dec 05 20:23:49 crc kubenswrapper[4982]: I1205 20:23:49.513871 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-68bx7_9d70d003-cbaf-4f61-b929-2803e9789657/kube-rbac-proxy/0.log" Dec 05 20:23:49 crc kubenswrapper[4982]: I1205 20:23:49.983308 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-pfpc6_de9c5641-2450-4451-ab4e-dc16a34a094c/frr/0.log" Dec 05 20:23:50 crc kubenswrapper[4982]: I1205 20:23:50.086813 4982 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_speaker-68bx7_9d70d003-cbaf-4f61-b929-2803e9789657/speaker/0.log" Dec 05 20:24:05 crc kubenswrapper[4982]: I1205 20:24:05.747808 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:24:05 crc kubenswrapper[4982]: I1205 20:24:05.979464 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.005378 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.036400 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.215741 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/util/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.229444 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/pull/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.255484 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fjzwg7_4ef522e3-7448-4261-b647-d5bb5a547dc7/extract/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.741135 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.957032 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.972032 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:24:06 crc kubenswrapper[4982]: I1205 20:24:06.979598 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.161705 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/extract/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.188203 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/pull/0.log" Dec 05 20:24:07 crc 
kubenswrapper[4982]: I1205 20:24:07.199099 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210zfqwf_e2e84dc1-b99f-497b-9ff4-d0502756b48c/util/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.370906 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.516899 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.538373 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.569405 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.753352 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/util/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.761164 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/extract/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.777565 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c1wpjqj_778aa6ce-5b87-4f63-b2dc-72daba528154/pull/0.log" Dec 05 20:24:07 crc kubenswrapper[4982]: I1205 20:24:07.946875 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.087581 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.116848 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.148963 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.308283 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/extract/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.327836 4982 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/pull/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.350974 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83gwxvr_6b00ab98-889d-421f-a3f4-1d9bacddb215/util/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.448318 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.679387 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.693309 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.697712 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.867449 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-content/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.907932 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/extract-utilities/0.log" Dec 05 20:24:08 crc kubenswrapper[4982]: I1205 20:24:08.974694 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.135372 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.148590 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.193557 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.452493 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-utilities/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.506100 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-dv6hf_0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/registry-server/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.567789 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/extract-content/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.721678 4982 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-n4462_66d787d5-2cd9-4a22-8549-acd33135e4f9/marketplace-operator/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.794858 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log" Dec 05 20:24:09 crc kubenswrapper[4982]: I1205 20:24:09.994230 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.076944 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.088776 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.159160 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-65jpq_2b5c769d-026f-40ae-a15d-c1916e429335/registry-server/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.248255 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.259401 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/extract-utilities/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.390785 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-lbk8q_20fee44a-379d-443b-ae53-3d595e7bcdb1/registry-server/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.412613 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.708125 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.746974 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.749554 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.931390 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-content/0.log" Dec 05 20:24:10 crc kubenswrapper[4982]: I1205 20:24:10.946939 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/extract-utilities/0.log" Dec 05 20:24:11 crc kubenswrapper[4982]: I1205 20:24:11.403019 4982 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-7llfc_e357daf9-c7b6-4ebb-a5a0-0c1046ba7037/registry-server/0.log" Dec 05 20:24:26 crc kubenswrapper[4982]: I1205 20:24:26.898961 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-5mhck_e2c8beeb-010c-4aac-b407-981a15acaee9/prometheus-operator/0.log" Dec 05 20:24:27 crc kubenswrapper[4982]: I1205 20:24:27.612714 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bd4749fc8-5hjfw_1624c766-4ba0-48bf-a7b7-4a8322251e2e/prometheus-operator-admission-webhook/0.log" Dec 05 20:24:27 crc kubenswrapper[4982]: I1205 20:24:27.643142 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-6bd4749fc8-mg9cz_fe591891-51d5-49c8-880a-213703150e27/prometheus-operator-admission-webhook/0.log" Dec 05 20:24:27 crc kubenswrapper[4982]: I1205 20:24:27.812738 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-nwfp4_abe3cbe3-f02e-4fc5-81e5-cb02da29d18b/operator/0.log" Dec 05 20:24:27 crc kubenswrapper[4982]: I1205 20:24:27.884747 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-lz5fb_15088f20-542f-426d-9e0f-cfb52b660483/perses-operator/0.log" Dec 05 20:24:41 crc kubenswrapper[4982]: I1205 20:24:41.904162 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/kube-rbac-proxy/0.log" Dec 05 20:24:41 crc kubenswrapper[4982]: I1205 20:24:41.913625 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-684f549574-q5qzv_362ad9c9-a652-4965-9d36-10c0332bff02/manager/0.log" Dec 05 20:25:42 crc kubenswrapper[4982]: I1205 20:25:42.557400 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:25:42 crc kubenswrapper[4982]: I1205 20:25:42.557994 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:26:12 crc kubenswrapper[4982]: I1205 20:26:12.557009 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 20:26:12 crc kubenswrapper[4982]: I1205 20:26:12.557782 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 20:26:28 crc kubenswrapper[4982]: I1205 20:26:28.392020 4982 generic.go:334] 
"Generic (PLEG): container finished" podID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerID="9c74d3039f9648d9dc56c379229186ee357a70088cc6619f1e317996f1b653b4" exitCode=0 Dec 05 20:26:28 crc kubenswrapper[4982]: I1205 20:26:28.392099 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" event={"ID":"9684edc7-e2f1-4e85-ab1b-5d5181219875","Type":"ContainerDied","Data":"9c74d3039f9648d9dc56c379229186ee357a70088cc6619f1e317996f1b653b4"} Dec 05 20:26:28 crc kubenswrapper[4982]: I1205 20:26:28.392977 4982 scope.go:117] "RemoveContainer" containerID="9c74d3039f9648d9dc56c379229186ee357a70088cc6619f1e317996f1b653b4" Dec 05 20:26:28 crc kubenswrapper[4982]: I1205 20:26:28.681349 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wkr2f_must-gather-fm4bf_9684edc7-e2f1-4e85-ab1b-5d5181219875/gather/0.log" Dec 05 20:26:39 crc kubenswrapper[4982]: I1205 20:26:39.905300 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wkr2f/must-gather-fm4bf"] Dec 05 20:26:39 crc kubenswrapper[4982]: I1205 20:26:39.906041 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wkr2f/must-gather-fm4bf" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="copy" containerID="cri-o://0adbb8c1020477b86faa2431d44064430beea8a51fcf4fc2a8f08e27285a4b5d" gracePeriod=2 Dec 05 20:26:39 crc kubenswrapper[4982]: I1205 20:26:39.915162 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wkr2f/must-gather-fm4bf"] Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.536043 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wkr2f_must-gather-fm4bf_9684edc7-e2f1-4e85-ab1b-5d5181219875/copy/0.log" Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.537216 4982 generic.go:334] "Generic (PLEG): container finished" podID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerID="0adbb8c1020477b86faa2431d44064430beea8a51fcf4fc2a8f08e27285a4b5d" exitCode=143 Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.639129 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wkr2f_must-gather-fm4bf_9684edc7-e2f1-4e85-ab1b-5d5181219875/copy/0.log" Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.639699 4982 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.723799 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rq97\" (UniqueName: \"kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97\") pod \"9684edc7-e2f1-4e85-ab1b-5d5181219875\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") "
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.723952 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output\") pod \"9684edc7-e2f1-4e85-ab1b-5d5181219875\" (UID: \"9684edc7-e2f1-4e85-ab1b-5d5181219875\") "
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.739011 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97" (OuterVolumeSpecName: "kube-api-access-4rq97") pod "9684edc7-e2f1-4e85-ab1b-5d5181219875" (UID: "9684edc7-e2f1-4e85-ab1b-5d5181219875"). InnerVolumeSpecName "kube-api-access-4rq97". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.826354 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rq97\" (UniqueName: \"kubernetes.io/projected/9684edc7-e2f1-4e85-ab1b-5d5181219875-kube-api-access-4rq97\") on node \"crc\" DevicePath \"\""
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.901298 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "9684edc7-e2f1-4e85-ab1b-5d5181219875" (UID: "9684edc7-e2f1-4e85-ab1b-5d5181219875"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 20:26:40 crc kubenswrapper[4982]: I1205 20:26:40.928174 4982 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/9684edc7-e2f1-4e85-ab1b-5d5181219875-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 20:26:41 crc kubenswrapper[4982]: I1205 20:26:41.404360 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" path="/var/lib/kubelet/pods/9684edc7-e2f1-4e85-ab1b-5d5181219875/volumes"
Dec 05 20:26:41 crc kubenswrapper[4982]: I1205 20:26:41.547703 4982 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wkr2f_must-gather-fm4bf_9684edc7-e2f1-4e85-ab1b-5d5181219875/copy/0.log"
Dec 05 20:26:41 crc kubenswrapper[4982]: I1205 20:26:41.548093 4982 scope.go:117] "RemoveContainer" containerID="0adbb8c1020477b86faa2431d44064430beea8a51fcf4fc2a8f08e27285a4b5d"
Dec 05 20:26:41 crc kubenswrapper[4982]: I1205 20:26:41.548134 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wkr2f/must-gather-fm4bf"
Dec 05 20:26:41 crc kubenswrapper[4982]: I1205 20:26:41.564778 4982 scope.go:117] "RemoveContainer" containerID="9c74d3039f9648d9dc56c379229186ee357a70088cc6619f1e317996f1b653b4"
Dec 05 20:26:42 crc kubenswrapper[4982]: I1205 20:26:42.557369 4982 patch_prober.go:28] interesting pod/machine-config-daemon-dldj9 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 20:26:42 crc kubenswrapper[4982]: I1205 20:26:42.557653 4982 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 20:26:42 crc kubenswrapper[4982]: I1205 20:26:42.557705 4982 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dldj9"
Dec 05 20:26:42 crc kubenswrapper[4982]: I1205 20:26:42.558642 4982 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1"} pod="openshift-machine-config-operator/machine-config-daemon-dldj9" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 20:26:42 crc kubenswrapper[4982]: I1205 20:26:42.558692 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" containerName="machine-config-daemon" containerID="cri-o://fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" gracePeriod=600
Dec 05 20:26:42 crc kubenswrapper[4982]: E1205 20:26:42.676722 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:26:43 crc kubenswrapper[4982]: I1205 20:26:43.590997 4982 generic.go:334] "Generic (PLEG): container finished" podID="6902f814-e220-41f2-887a-1831b28c14ee" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" exitCode=0
Dec 05 20:26:43 crc kubenswrapper[4982]: I1205 20:26:43.591098 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" event={"ID":"6902f814-e220-41f2-887a-1831b28c14ee","Type":"ContainerDied","Data":"fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1"}
Dec 05 20:26:43 crc kubenswrapper[4982]: I1205 20:26:43.591183 4982 scope.go:117] "RemoveContainer" containerID="5c01852a9608de15cb4551f00909b972e10154a4e9408a8f22cfcef0f86b533d"
Dec 05 20:26:43 crc kubenswrapper[4982]: I1205 20:26:43.591911 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1"
Dec 05 20:26:43 crc kubenswrapper[4982]: E1205 20:26:43.592279 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:26:52 crc kubenswrapper[4982]: I1205 20:26:52.324295 4982 scope.go:117] "RemoveContainer" containerID="ab5f4e6fa0f65c2aa9d0a4813c83c8e9da78ddcd318a95ab1a929644d769e26b"
Dec 05 20:26:55 crc kubenswrapper[4982]: I1205 20:26:55.393102 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1"
Dec 05 20:26:55 crc kubenswrapper[4982]: E1205 20:26:55.394799 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.714382 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qbd6m"]
Dec 05 20:27:06 crc kubenswrapper[4982]: E1205 20:27:06.715330 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="gather"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715408 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="gather"
Dec 05 20:27:06 crc kubenswrapper[4982]: E1205 20:27:06.715425 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="registry-server"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715431 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="registry-server"
Dec 05 20:27:06 crc kubenswrapper[4982]: E1205 20:27:06.715447 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="copy"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715452 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="copy"
Dec 05 20:27:06 crc kubenswrapper[4982]: E1205 20:27:06.715460 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="extract-content"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715465 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="extract-content"
Dec 05 20:27:06 crc kubenswrapper[4982]: E1205 20:27:06.715477 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="extract-utilities"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715483 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="extract-utilities"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715707 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4906847-1446-4d9c-87e1-70ca56f17c89" containerName="registry-server"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715727 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="copy"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.715739 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="9684edc7-e2f1-4e85-ab1b-5d5181219875" containerName="gather"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.717911 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.741051 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qbd6m"]
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.872113 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-catalog-content\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.872313 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h6f78\" (UniqueName: \"kubernetes.io/projected/d01440ea-1906-42a5-95e4-faa41adcb732-kube-api-access-h6f78\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.872461 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-utilities\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.974824 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-utilities\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.975042 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-catalog-content\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.975180 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h6f78\" (UniqueName: \"kubernetes.io/projected/d01440ea-1906-42a5-95e4-faa41adcb732-kube-api-access-h6f78\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.975470 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-utilities\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m"
\"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.975642 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d01440ea-1906-42a5-95e4-faa41adcb732-catalog-content\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:06 crc kubenswrapper[4982]: I1205 20:27:06.995734 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h6f78\" (UniqueName: \"kubernetes.io/projected/d01440ea-1906-42a5-95e4-faa41adcb732-kube-api-access-h6f78\") pod \"certified-operators-qbd6m\" (UID: \"d01440ea-1906-42a5-95e4-faa41adcb732\") " pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:07 crc kubenswrapper[4982]: I1205 20:27:07.048498 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:07 crc kubenswrapper[4982]: I1205 20:27:07.706993 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qbd6m"] Dec 05 20:27:07 crc kubenswrapper[4982]: I1205 20:27:07.824997 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbd6m" event={"ID":"d01440ea-1906-42a5-95e4-faa41adcb732","Type":"ContainerStarted","Data":"896d3ee6ed4277965f76f58ebcbf6ca6ccf80f3d85e0ade232f8d70a95e486b3"} Dec 05 20:27:08 crc kubenswrapper[4982]: I1205 20:27:08.844632 4982 generic.go:334] "Generic (PLEG): container finished" podID="d01440ea-1906-42a5-95e4-faa41adcb732" containerID="ad7c59f6f81c6ba1a7fb5156c1758ccf2bf3aa89f5d9f1f54bf1113422e9ac58" exitCode=0 Dec 05 20:27:08 crc kubenswrapper[4982]: I1205 20:27:08.844943 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbd6m" event={"ID":"d01440ea-1906-42a5-95e4-faa41adcb732","Type":"ContainerDied","Data":"ad7c59f6f81c6ba1a7fb5156c1758ccf2bf3aa89f5d9f1f54bf1113422e9ac58"} Dec 05 20:27:08 crc kubenswrapper[4982]: I1205 20:27:08.847117 4982 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.119750 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.122071 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.166779 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.221457 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zm2nf\" (UniqueName: \"kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.221507 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.221547 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.323175 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zm2nf\" (UniqueName: \"kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.323457 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.323607 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.323992 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.324028 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.363242 4982 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-zm2nf\" (UniqueName: \"kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf\") pod \"community-operators-vr4sc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.390328 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:27:09 crc kubenswrapper[4982]: E1205 20:27:09.390780 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:27:09 crc kubenswrapper[4982]: I1205 20:27:09.460767 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:10 crc kubenswrapper[4982]: I1205 20:27:10.052341 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:10 crc kubenswrapper[4982]: W1205 20:27:10.061827 4982 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7be47ab_2bb9_4335_9cdd_c457214bffcc.slice/crio-3923d71067ed6b92651bf92a5a6e9c9849bfc0f114dcd1221355e247e1d843b6 WatchSource:0}: Error finding container 3923d71067ed6b92651bf92a5a6e9c9849bfc0f114dcd1221355e247e1d843b6: Status 404 returned error can't find the container with id 3923d71067ed6b92651bf92a5a6e9c9849bfc0f114dcd1221355e247e1d843b6 Dec 05 20:27:10 crc kubenswrapper[4982]: I1205 20:27:10.865746 4982 generic.go:334] "Generic (PLEG): container finished" podID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerID="7dc6ff652777d287ed61082bbb7e91f18d142dc97e21b8de64c00653378cefa4" exitCode=0 Dec 05 20:27:10 crc kubenswrapper[4982]: I1205 20:27:10.865792 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerDied","Data":"7dc6ff652777d287ed61082bbb7e91f18d142dc97e21b8de64c00653378cefa4"} Dec 05 20:27:10 crc kubenswrapper[4982]: I1205 20:27:10.866023 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerStarted","Data":"3923d71067ed6b92651bf92a5a6e9c9849bfc0f114dcd1221355e247e1d843b6"} Dec 05 20:27:14 crc kubenswrapper[4982]: I1205 20:27:14.925750 4982 generic.go:334] "Generic (PLEG): container finished" podID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerID="30344304af9df9d08daf96787f419666552b31b22bc1c1f141ccb9aab0e1f718" exitCode=0 Dec 05 20:27:14 crc kubenswrapper[4982]: I1205 20:27:14.926440 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerDied","Data":"30344304af9df9d08daf96787f419666552b31b22bc1c1f141ccb9aab0e1f718"} Dec 05 20:27:14 crc kubenswrapper[4982]: I1205 20:27:14.931746 4982 generic.go:334] "Generic (PLEG): container finished" podID="d01440ea-1906-42a5-95e4-faa41adcb732" 
containerID="02f55e83775092a3994dd9aea92eb874b733a86c12ab1d4c6922d586a1db680a" exitCode=0 Dec 05 20:27:14 crc kubenswrapper[4982]: I1205 20:27:14.931789 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbd6m" event={"ID":"d01440ea-1906-42a5-95e4-faa41adcb732","Type":"ContainerDied","Data":"02f55e83775092a3994dd9aea92eb874b733a86c12ab1d4c6922d586a1db680a"} Dec 05 20:27:15 crc kubenswrapper[4982]: I1205 20:27:15.947710 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerStarted","Data":"d32d54d599b63033bf07ec1467b5a61e90a2ba7d25b77cf3cb698f793da74310"} Dec 05 20:27:15 crc kubenswrapper[4982]: I1205 20:27:15.951269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qbd6m" event={"ID":"d01440ea-1906-42a5-95e4-faa41adcb732","Type":"ContainerStarted","Data":"cca36d505407769c1fb92f613c0c22d57b137e017963140bc305e628de5d4569"} Dec 05 20:27:15 crc kubenswrapper[4982]: I1205 20:27:15.974768 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vr4sc" podStartSLOduration=2.374741533 podStartE2EDuration="6.974749279s" podCreationTimestamp="2025-12-05 20:27:09 +0000 UTC" firstStartedPulling="2025-12-05 20:27:10.868007827 +0000 UTC m=+4409.749893822" lastFinishedPulling="2025-12-05 20:27:15.468015573 +0000 UTC m=+4414.349901568" observedRunningTime="2025-12-05 20:27:15.962565027 +0000 UTC m=+4414.844451032" watchObservedRunningTime="2025-12-05 20:27:15.974749279 +0000 UTC m=+4414.856635284" Dec 05 20:27:15 crc kubenswrapper[4982]: I1205 20:27:15.994684 4982 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qbd6m" podStartSLOduration=3.445703041 podStartE2EDuration="9.994664063s" podCreationTimestamp="2025-12-05 20:27:06 +0000 UTC" firstStartedPulling="2025-12-05 20:27:08.846833349 +0000 UTC m=+4407.728719344" lastFinishedPulling="2025-12-05 20:27:15.395794371 +0000 UTC m=+4414.277680366" observedRunningTime="2025-12-05 20:27:15.986018049 +0000 UTC m=+4414.867904054" watchObservedRunningTime="2025-12-05 20:27:15.994664063 +0000 UTC m=+4414.876550048" Dec 05 20:27:17 crc kubenswrapper[4982]: I1205 20:27:17.050283 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:17 crc kubenswrapper[4982]: I1205 20:27:17.050657 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:18 crc kubenswrapper[4982]: I1205 20:27:18.109396 4982 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-qbd6m" podUID="d01440ea-1906-42a5-95e4-faa41adcb732" containerName="registry-server" probeResult="failure" output=< Dec 05 20:27:18 crc kubenswrapper[4982]: timeout: failed to connect service ":50051" within 1s Dec 05 20:27:18 crc kubenswrapper[4982]: > Dec 05 20:27:19 crc kubenswrapper[4982]: I1205 20:27:19.461653 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:19 crc kubenswrapper[4982]: I1205 20:27:19.461929 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:19 crc kubenswrapper[4982]: I1205 
20:27:19.519945 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:21 crc kubenswrapper[4982]: I1205 20:27:21.402648 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:27:21 crc kubenswrapper[4982]: E1205 20:27:21.402961 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:27:27 crc kubenswrapper[4982]: I1205 20:27:27.114501 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:27 crc kubenswrapper[4982]: I1205 20:27:27.196589 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qbd6m" Dec 05 20:27:27 crc kubenswrapper[4982]: I1205 20:27:27.739342 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qbd6m"] Dec 05 20:27:27 crc kubenswrapper[4982]: I1205 20:27:27.907566 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 20:27:27 crc kubenswrapper[4982]: I1205 20:27:27.907893 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dv6hf" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="registry-server" containerID="cri-o://021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" gracePeriod=2 Dec 05 20:27:27 crc kubenswrapper[4982]: E1205 20:27:27.971640 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67 is running failed: container process not found" containerID="021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 20:27:27 crc kubenswrapper[4982]: E1205 20:27:27.972823 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67 is running failed: container process not found" containerID="021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 20:27:27 crc kubenswrapper[4982]: E1205 20:27:27.973314 4982 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67 is running failed: container process not found" containerID="021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 20:27:27 crc kubenswrapper[4982]: E1205 20:27:27.973353 4982 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67 is running failed: container 
process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-dv6hf" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="registry-server" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.100669 4982 generic.go:334] "Generic (PLEG): container finished" podID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerID="021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" exitCode=0 Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.100770 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerDied","Data":"021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67"} Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.540488 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.698738 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities\") pod \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.698815 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content\") pod \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.698834 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvfvx\" (UniqueName: \"kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx\") pod \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\" (UID: \"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6\") " Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.700093 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities" (OuterVolumeSpecName: "utilities") pod "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" (UID: "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.705382 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx" (OuterVolumeSpecName: "kube-api-access-kvfvx") pod "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" (UID: "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6"). InnerVolumeSpecName "kube-api-access-kvfvx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.771953 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" (UID: "0b347bc0-6d6c-4ed1-8db0-3a56934f27c6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.801523 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.801575 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:28 crc kubenswrapper[4982]: I1205 20:27:28.801590 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvfvx\" (UniqueName: \"kubernetes.io/projected/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6-kube-api-access-kvfvx\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.115162 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dv6hf" event={"ID":"0b347bc0-6d6c-4ed1-8db0-3a56934f27c6","Type":"ContainerDied","Data":"756bb7cfa8cd75a3e269e6addfb36c99661c39346b9d725177864dc3da43ed15"} Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.115220 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dv6hf" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.115230 4982 scope.go:117] "RemoveContainer" containerID="021d239e272cac7d40f8f533aefdda7d9351d3610dcd89804c0898ac46d95b67" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.145746 4982 scope.go:117] "RemoveContainer" containerID="d66d2aa8f6e2f6e2d33c283532487bbcf24bad6cc1bed6afa10c0ec200698fcf" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.158918 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.167573 4982 scope.go:117] "RemoveContainer" containerID="c8f773fc5d4b591229d6825696489487ebd6671ca057432bb1cab7d28d8567a9" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.173957 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dv6hf"] Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.401602 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" path="/var/lib/kubelet/pods/0b347bc0-6d6c-4ed1-8db0-3a56934f27c6/volumes" Dec 05 20:27:29 crc kubenswrapper[4982]: I1205 20:27:29.521085 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:31 crc kubenswrapper[4982]: I1205 20:27:31.908518 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:31 crc kubenswrapper[4982]: I1205 20:27:31.909256 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vr4sc" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="registry-server" containerID="cri-o://d32d54d599b63033bf07ec1467b5a61e90a2ba7d25b77cf3cb698f793da74310" gracePeriod=2 Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.151326 4982 generic.go:334] "Generic (PLEG): container finished" podID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerID="d32d54d599b63033bf07ec1467b5a61e90a2ba7d25b77cf3cb698f793da74310" exitCode=0 Dec 05 20:27:32 crc kubenswrapper[4982]: 
I1205 20:27:32.151402 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerDied","Data":"d32d54d599b63033bf07ec1467b5a61e90a2ba7d25b77cf3cb698f793da74310"} Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.592396 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.682545 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zm2nf\" (UniqueName: \"kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf\") pod \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.682691 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content\") pod \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.682779 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities\") pod \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\" (UID: \"f7be47ab-2bb9-4335-9cdd-c457214bffcc\") " Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.683681 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities" (OuterVolumeSpecName: "utilities") pod "f7be47ab-2bb9-4335-9cdd-c457214bffcc" (UID: "f7be47ab-2bb9-4335-9cdd-c457214bffcc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.690321 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf" (OuterVolumeSpecName: "kube-api-access-zm2nf") pod "f7be47ab-2bb9-4335-9cdd-c457214bffcc" (UID: "f7be47ab-2bb9-4335-9cdd-c457214bffcc"). InnerVolumeSpecName "kube-api-access-zm2nf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.731094 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f7be47ab-2bb9-4335-9cdd-c457214bffcc" (UID: "f7be47ab-2bb9-4335-9cdd-c457214bffcc"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.785019 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.785050 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zm2nf\" (UniqueName: \"kubernetes.io/projected/f7be47ab-2bb9-4335-9cdd-c457214bffcc-kube-api-access-zm2nf\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:32 crc kubenswrapper[4982]: I1205 20:27:32.785061 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f7be47ab-2bb9-4335-9cdd-c457214bffcc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.165269 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vr4sc" event={"ID":"f7be47ab-2bb9-4335-9cdd-c457214bffcc","Type":"ContainerDied","Data":"3923d71067ed6b92651bf92a5a6e9c9849bfc0f114dcd1221355e247e1d843b6"} Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.166883 4982 scope.go:117] "RemoveContainer" containerID="d32d54d599b63033bf07ec1467b5a61e90a2ba7d25b77cf3cb698f793da74310" Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.165360 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vr4sc" Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.199008 4982 scope.go:117] "RemoveContainer" containerID="30344304af9df9d08daf96787f419666552b31b22bc1c1f141ccb9aab0e1f718" Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.209923 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.222206 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vr4sc"] Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.228634 4982 scope.go:117] "RemoveContainer" containerID="7dc6ff652777d287ed61082bbb7e91f18d142dc97e21b8de64c00653378cefa4" Dec 05 20:27:33 crc kubenswrapper[4982]: I1205 20:27:33.406252 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" path="/var/lib/kubelet/pods/f7be47ab-2bb9-4335-9cdd-c457214bffcc/volumes" Dec 05 20:27:36 crc kubenswrapper[4982]: I1205 20:27:36.390661 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:27:36 crc kubenswrapper[4982]: E1205 20:27:36.391737 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.824672 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826167 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" 
containerName="extract-utilities" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826183 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="extract-utilities" Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826216 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826225 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826240 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="extract-content" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826245 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="extract-content" Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826265 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="extract-utilities" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826272 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="extract-utilities" Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826286 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="extract-content" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826292 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="extract-content" Dec 05 20:27:45 crc kubenswrapper[4982]: E1205 20:27:45.826302 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826309 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826510 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b347bc0-6d6c-4ed1-8db0-3a56934f27c6" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.826521 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7be47ab-2bb9-4335-9cdd-c457214bffcc" containerName="registry-server" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.828014 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.850063 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.967238 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.967653 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:45 crc kubenswrapper[4982]: I1205 20:27:45.967690 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdkhq\" (UniqueName: \"kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.069311 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.069400 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.069435 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdkhq\" (UniqueName: \"kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.070027 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.070060 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.107540 4982 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mdkhq\" (UniqueName: \"kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq\") pod \"redhat-marketplace-zwm7b\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.146319 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:46 crc kubenswrapper[4982]: I1205 20:27:46.606579 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:27:47 crc kubenswrapper[4982]: I1205 20:27:47.312918 4982 generic.go:334] "Generic (PLEG): container finished" podID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerID="2bad550edf2062f319d497667eba0badb38d551a7b94f25c43936d64438e7216" exitCode=0 Dec 05 20:27:47 crc kubenswrapper[4982]: I1205 20:27:47.312993 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerDied","Data":"2bad550edf2062f319d497667eba0badb38d551a7b94f25c43936d64438e7216"} Dec 05 20:27:47 crc kubenswrapper[4982]: I1205 20:27:47.314130 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerStarted","Data":"2c354f77dceac33150bbb855206990cda9ee5cb44bb76953da55abdf7091e8e1"} Dec 05 20:27:48 crc kubenswrapper[4982]: I1205 20:27:48.324716 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerStarted","Data":"a88f8ee5d9202b7eb8cd13142c58f0724487330a23af1bd5aafc650ff3b62eee"} Dec 05 20:27:49 crc kubenswrapper[4982]: I1205 20:27:49.336102 4982 generic.go:334] "Generic (PLEG): container finished" podID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerID="a88f8ee5d9202b7eb8cd13142c58f0724487330a23af1bd5aafc650ff3b62eee" exitCode=0 Dec 05 20:27:49 crc kubenswrapper[4982]: I1205 20:27:49.336161 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerDied","Data":"a88f8ee5d9202b7eb8cd13142c58f0724487330a23af1bd5aafc650ff3b62eee"} Dec 05 20:27:49 crc kubenswrapper[4982]: I1205 20:27:49.390514 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:27:49 crc kubenswrapper[4982]: E1205 20:27:49.390756 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:27:50 crc kubenswrapper[4982]: I1205 20:27:50.349054 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerStarted","Data":"fb5de61454ca94a8e0b4bcd534c119d4d041e13ad9b305e2fbcbdc5d5afa285f"} Dec 05 20:27:50 crc kubenswrapper[4982]: I1205 20:27:50.377561 4982 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openshift-marketplace/redhat-marketplace-zwm7b" podStartSLOduration=2.913702861 podStartE2EDuration="5.377539596s" podCreationTimestamp="2025-12-05 20:27:45 +0000 UTC" firstStartedPulling="2025-12-05 20:27:47.315055633 +0000 UTC m=+4446.196941638" lastFinishedPulling="2025-12-05 20:27:49.778892368 +0000 UTC m=+4448.660778373" observedRunningTime="2025-12-05 20:27:50.367495987 +0000 UTC m=+4449.249382002" watchObservedRunningTime="2025-12-05 20:27:50.377539596 +0000 UTC m=+4449.259425601" Dec 05 20:27:56 crc kubenswrapper[4982]: I1205 20:27:56.147253 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:56 crc kubenswrapper[4982]: I1205 20:27:56.147770 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:56 crc kubenswrapper[4982]: I1205 20:27:56.195059 4982 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:56 crc kubenswrapper[4982]: I1205 20:27:56.473913 4982 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:56 crc kubenswrapper[4982]: I1205 20:27:56.532211 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:27:58 crc kubenswrapper[4982]: I1205 20:27:58.441876 4982 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-zwm7b" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="registry-server" containerID="cri-o://fb5de61454ca94a8e0b4bcd534c119d4d041e13ad9b305e2fbcbdc5d5afa285f" gracePeriod=2 Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.453962 4982 generic.go:334] "Generic (PLEG): container finished" podID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerID="fb5de61454ca94a8e0b4bcd534c119d4d041e13ad9b305e2fbcbdc5d5afa285f" exitCode=0 Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.454022 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerDied","Data":"fb5de61454ca94a8e0b4bcd534c119d4d041e13ad9b305e2fbcbdc5d5afa285f"} Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.636574 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.781607 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdkhq\" (UniqueName: \"kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq\") pod \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.781793 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content\") pod \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.781858 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities\") pod \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\" (UID: \"b99fea4b-b38b-4cb7-939b-7fc9d02da33e\") " Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.782772 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities" (OuterVolumeSpecName: "utilities") pod "b99fea4b-b38b-4cb7-939b-7fc9d02da33e" (UID: "b99fea4b-b38b-4cb7-939b-7fc9d02da33e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.789233 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq" (OuterVolumeSpecName: "kube-api-access-mdkhq") pod "b99fea4b-b38b-4cb7-939b-7fc9d02da33e" (UID: "b99fea4b-b38b-4cb7-939b-7fc9d02da33e"). InnerVolumeSpecName "kube-api-access-mdkhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.806672 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b99fea4b-b38b-4cb7-939b-7fc9d02da33e" (UID: "b99fea4b-b38b-4cb7-939b-7fc9d02da33e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.884547 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdkhq\" (UniqueName: \"kubernetes.io/projected/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-kube-api-access-mdkhq\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.884582 4982 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 20:27:59 crc kubenswrapper[4982]: I1205 20:27:59.884597 4982 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b99fea4b-b38b-4cb7-939b-7fc9d02da33e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.464697 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zwm7b" event={"ID":"b99fea4b-b38b-4cb7-939b-7fc9d02da33e","Type":"ContainerDied","Data":"2c354f77dceac33150bbb855206990cda9ee5cb44bb76953da55abdf7091e8e1"} Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.465724 4982 scope.go:117] "RemoveContainer" containerID="fb5de61454ca94a8e0b4bcd534c119d4d041e13ad9b305e2fbcbdc5d5afa285f" Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.464745 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zwm7b" Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.488133 4982 scope.go:117] "RemoveContainer" containerID="a88f8ee5d9202b7eb8cd13142c58f0724487330a23af1bd5aafc650ff3b62eee" Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.511165 4982 scope.go:117] "RemoveContainer" containerID="2bad550edf2062f319d497667eba0badb38d551a7b94f25c43936d64438e7216" Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.511757 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:28:00 crc kubenswrapper[4982]: I1205 20:28:00.524142 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-zwm7b"] Dec 05 20:28:01 crc kubenswrapper[4982]: I1205 20:28:01.399869 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:28:01 crc kubenswrapper[4982]: E1205 20:28:01.400271 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:28:01 crc kubenswrapper[4982]: I1205 20:28:01.405813 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" path="/var/lib/kubelet/pods/b99fea4b-b38b-4cb7-939b-7fc9d02da33e/volumes" Dec 05 20:28:16 crc kubenswrapper[4982]: I1205 20:28:16.395129 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:28:16 crc kubenswrapper[4982]: E1205 20:28:16.403833 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:28:27 crc kubenswrapper[4982]: I1205 20:28:27.391651 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:28:27 crc kubenswrapper[4982]: E1205 20:28:27.392409 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:28:38 crc kubenswrapper[4982]: I1205 20:28:38.390414 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:28:38 crc kubenswrapper[4982]: E1205 20:28:38.391431 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:28:50 crc kubenswrapper[4982]: I1205 20:28:50.390926 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:28:50 crc kubenswrapper[4982]: E1205 20:28:50.391731 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:29:04 crc kubenswrapper[4982]: I1205 20:29:04.390268 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:29:04 crc kubenswrapper[4982]: E1205 20:29:04.391057 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:29:18 crc kubenswrapper[4982]: I1205 20:29:18.390212 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:29:18 crc kubenswrapper[4982]: E1205 20:29:18.390952 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:29:31 crc kubenswrapper[4982]: I1205 20:29:31.399908 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:29:31 crc kubenswrapper[4982]: E1205 20:29:31.400949 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:29:45 crc kubenswrapper[4982]: I1205 20:29:45.399200 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:29:45 crc kubenswrapper[4982]: E1205 20:29:45.400008 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.188658 4982 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf"] Dec 05 20:30:00 crc kubenswrapper[4982]: E1205 20:30:00.189612 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="registry-server" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.189626 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="registry-server" Dec 05 20:30:00 crc kubenswrapper[4982]: E1205 20:30:00.189745 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="extract-content" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.189751 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="extract-content" Dec 05 20:30:00 crc kubenswrapper[4982]: E1205 20:30:00.189765 4982 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="extract-utilities" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.189771 4982 state_mem.go:107] "Deleted CPUSet assignment" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="extract-utilities" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.189981 4982 memory_manager.go:354] "RemoveStaleState removing state" podUID="b99fea4b-b38b-4cb7-939b-7fc9d02da33e" containerName="registry-server" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.190865 4982 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.193576 4982 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.193582 4982 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.267978 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf"] Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.390649 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.391080 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.391114 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gg5ch\" (UniqueName: \"kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: E1205 20:30:00.391269 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.392287 4982 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.494359 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.494404 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gg5ch\" (UniqueName: \"kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 
05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.494538 4982 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.495494 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.500204 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.510472 4982 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gg5ch\" (UniqueName: \"kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch\") pod \"collect-profiles-29416110-cdvrf\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.523464 4982 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:00 crc kubenswrapper[4982]: I1205 20:30:00.973204 4982 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf"] Dec 05 20:30:01 crc kubenswrapper[4982]: I1205 20:30:01.681620 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" event={"ID":"5b77fd83-c796-44d5-81fe-04101caabee5","Type":"ContainerStarted","Data":"5df1b5f524e4b23b769e76d1e24d2d403b025972e610d1deb5aa2a918b114363"} Dec 05 20:30:02 crc kubenswrapper[4982]: I1205 20:30:02.692744 4982 generic.go:334] "Generic (PLEG): container finished" podID="5b77fd83-c796-44d5-81fe-04101caabee5" containerID="49f0c85df24ddc883a8a5e17f7de55d22baffb8058ae9d2ff93f02804359b031" exitCode=0 Dec 05 20:30:02 crc kubenswrapper[4982]: I1205 20:30:02.692801 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" event={"ID":"5b77fd83-c796-44d5-81fe-04101caabee5","Type":"ContainerDied","Data":"49f0c85df24ddc883a8a5e17f7de55d22baffb8058ae9d2ff93f02804359b031"} Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.209637 4982 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.275172 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume\") pod \"5b77fd83-c796-44d5-81fe-04101caabee5\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.275659 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gg5ch\" (UniqueName: \"kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch\") pod \"5b77fd83-c796-44d5-81fe-04101caabee5\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.275807 4982 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume\") pod \"5b77fd83-c796-44d5-81fe-04101caabee5\" (UID: \"5b77fd83-c796-44d5-81fe-04101caabee5\") " Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.276203 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume" (OuterVolumeSpecName: "config-volume") pod "5b77fd83-c796-44d5-81fe-04101caabee5" (UID: "5b77fd83-c796-44d5-81fe-04101caabee5"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.276663 4982 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/5b77fd83-c796-44d5-81fe-04101caabee5-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.282170 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "5b77fd83-c796-44d5-81fe-04101caabee5" (UID: "5b77fd83-c796-44d5-81fe-04101caabee5"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.282273 4982 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch" (OuterVolumeSpecName: "kube-api-access-gg5ch") pod "5b77fd83-c796-44d5-81fe-04101caabee5" (UID: "5b77fd83-c796-44d5-81fe-04101caabee5"). InnerVolumeSpecName "kube-api-access-gg5ch". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.378332 4982 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gg5ch\" (UniqueName: \"kubernetes.io/projected/5b77fd83-c796-44d5-81fe-04101caabee5-kube-api-access-gg5ch\") on node \"crc\" DevicePath \"\"" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.378366 4982 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/5b77fd83-c796-44d5-81fe-04101caabee5-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.713672 4982 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" event={"ID":"5b77fd83-c796-44d5-81fe-04101caabee5","Type":"ContainerDied","Data":"5df1b5f524e4b23b769e76d1e24d2d403b025972e610d1deb5aa2a918b114363"} Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.714207 4982 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5df1b5f524e4b23b769e76d1e24d2d403b025972e610d1deb5aa2a918b114363" Dec 05 20:30:04 crc kubenswrapper[4982]: I1205 20:30:04.713870 4982 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416110-cdvrf" Dec 05 20:30:05 crc kubenswrapper[4982]: I1205 20:30:05.289071 4982 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc"] Dec 05 20:30:05 crc kubenswrapper[4982]: I1205 20:30:05.297827 4982 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416065-cdpjc"] Dec 05 20:30:05 crc kubenswrapper[4982]: I1205 20:30:05.406623 4982 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c44f0df7-bb79-4c72-976f-baa2ab421cc3" path="/var/lib/kubelet/pods/c44f0df7-bb79-4c72-976f-baa2ab421cc3/volumes" Dec 05 20:30:13 crc kubenswrapper[4982]: I1205 20:30:13.390353 4982 scope.go:117] "RemoveContainer" containerID="fd3cd0b2e951565f08f083b3642163ec3376278927532bcc034b7d66c80fb1b1" Dec 05 20:30:13 crc kubenswrapper[4982]: E1205 20:30:13.391678 4982 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-dldj9_openshift-machine-config-operator(6902f814-e220-41f2-887a-1831b28c14ee)\"" pod="openshift-machine-config-operator/machine-config-daemon-dldj9" podUID="6902f814-e220-41f2-887a-1831b28c14ee" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114640340024443 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114640341017361 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114627050016506 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114627050015456 5ustar corecore